Code Example #1
 def setUp(self):
     """Set up self._trie with 111 tokens, one of them a blessed version."""
     self._trie = pytrie.StringTrie()
     self._store = EphemeralStore()
     blessed_version = BlessedVersion(MasterHandler._BLESSED_VERSION,
                                      MasterHandler._MASTER_OWNER)
     for i in range(0, 10):
         some_token = Token(blessed_version.advance_version(),
                            '/some_dir/some_token_%d' % i,
                            priority=i,
                            data='some_data_%d' % i)
         self._trie[some_token.name] = some_token
         self._store.commit_tokens(updates=[some_token])
         for j in range(0, 10):
             some_other_token = Token(
                 blessed_version.advance_version(),
                 '/some_dir/some_token_%d/some_other_token_%d' % (i, j),
                 priority=j,
                 data='some_data_%d_%d' % (i, j))
             self._trie[some_other_token.name] = some_other_token
             self._store.commit_tokens(updates=[some_other_token])
     blessed_version.advance_version()
     self._trie[MasterHandler._BLESSED_VERSION] = blessed_version
     self._store.commit_tokens(updates=[blessed_version])
     self._check_version_uniqueness()
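Several fixtures on this page rely on the same EphemeralStore contract: commit_tokens(updates=...) makes a token visible to both read_tokens() and read_active_tokens() until it is archived. A minimal sketch of that round trip, using only calls that appear in the examples themselves (the import paths are an assumption; the snippets on this page omit their imports):

from pinball.master.thrift_lib.ttypes import Token    # assumed path
from pinball.persistence.store import EphemeralStore  # assumed path

store = EphemeralStore()
token = Token(name='/some_dir/some_token', data='some data')
store.commit_tokens(updates=[token])
assert len(store.read_active_tokens()) == 1  # active until archived
assert len(store.read_tokens()) == 1         # all tokens, active or archived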
Code Example #2
File: worker_test.py Project: yonglehou/pinball
 def setUp(self):
     self._factory = Factory()
     self._store = EphemeralStore()
     self._factory.create_master(self._store)
     self._emailer = mock.Mock()
     self._worker = Worker(self._factory.get_client(), self._store,
                           self._emailer)
     self._client = self._factory.get_client()
Code Example #3
    def _add_active_workflow_tokens(self):
        """Add some active workflow tokens.

        The job dependencies form a complete binary tree turned upside down.
        I.e., each job has two parents.
        """
        self._store = EphemeralStore()
        version = 1
        for level in range(AnalyzerTestCase._NUM_LEVELS):
            jobs_at_level = 2**(AnalyzerTestCase._NUM_LEVELS - level - 1)
            for job_index in range(jobs_at_level):
                job_name = 'job_%d_%d' % (level, job_index)
                event_name = Name(workflow='some_workflow',
                                  instance='123',
                                  job=job_name,
                                  event='some_event')
                if level == 0:
                    inputs = [
                        Name.WORKFLOW_START_INPUT,
                        Name.WORKFLOW_START_INPUT + '_prime'
                    ]
                    event_name.input = Name.WORKFLOW_START_INPUT
                else:
                    inputs = [
                        'job_%d_%d' % (level - 1, 2 * job_index),
                        'job_%d_%d' % (level - 1, 2 * job_index + 1)
                    ]
                    event_name.input = 'job_%d_%d' % (level - 1, 2 * job_index)
                if level == AnalyzerTestCase._NUM_LEVELS - 1:
                    outputs = []
                else:
                    outputs = ['job_%d_%d' % (level + 1, job_index / 2)]
                job = ShellJob(name=job_name,
                               inputs=inputs,
                               outputs=outputs,
                               command='some_command')
                job.history.append(ExecutionRecord())
                name = Name(workflow='some_workflow',
                            instance='123',
                            job_state=Name.WAITING_STATE,
                            job=job_name)
                job_token = Token(version=version,
                                  name=name.get_job_token_name(),
                                  priority=10,
                                  data=pickle.dumps(job))
                version += 1
                event = Event('some_event')
                event_token = Token(version=version,
                                    name=event_name.get_event_token_name(),
                                    priority=10,
                                    data=pickle.dumps(event))
                self._store.commit_tokens([job_token, event_token])
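The analyzer tests built on this helper archive the freshly committed tokens before constructing an Analyzer (see _archive_tokens in Code Example #12 below). A short sketch of the archive semantics that step relies on, with store standing in for self._store:

tokens = store.read_active_tokens()
store.archive_tokens(tokens)
assert store.read_active_tokens() == []         # archived tokens leave the active set
assert len(store.read_tokens()) == len(tokens)  # but they remain readable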
Code Example #4
 def test_group(self):
     request = GroupRequest()
     request.namePrefix = '/'
     handler = MasterHandler(EphemeralStore())
     response = handler.group(request)
     self.assertEqual(1, len(response.counts))
     self.assertEqual(1, response.counts.values()[0])
Code Example #5
File: worker_test.py Project: Betterment/pinball
 def setUp(self):
     self._factory = Factory()
     self._store = EphemeralStore()
     self._factory.create_master(self._store)
     self._emailer = mock.Mock()
     self._worker = Worker(self._factory.get_client(), self._store,
                           self._emailer)
     self._client = self._factory.get_client()
Code Example #6
 def setUp(self):
     self._factory = Factory()
     store = EphemeralStore()
     self._factory.create_master(store)
     emailer = Emailer('some_host', '8080')
     self._scheduler = Scheduler(self._factory.get_client(), store, emailer)
     self._client = self._factory.get_client()
     self._post_schedule_token()
Code Example #7
 def test_query(self):
     query = Query()
     query.namePrefix = ''
     query.maxTokens = 10
     request = QueryRequest()
     request.queries = [query]
     handler = MasterHandler(EphemeralStore())
     response = handler.query(request)
     self.assertEqual(1, len(response.tokens))
Code Example #8
 def test_query_and_own(self):
     query = Query()
     query.namePrefix = ''
     query.maxTokens = 10
     request = QueryAndOwnRequest()
     request.owner = 'some_owner'
     request.expirationTime = sys.maxint
     request.query = query
     handler = MasterHandler(EphemeralStore())
     response = handler.query_and_own(request)
     self.assertEqual(0, len(response.tokens))
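Read together with test_query (Code Example #7), this shows the asymmetry between the two read paths: both run the same query against a fresh MasterHandler, whose store holds only the blessed-version token. That token is created with MasterHandler._MASTER_OWNER as its owner (see Code Example #1), so a plain query returns it while an ownership query presumably cannot claim it. A condensed sketch of the contrast (Python 2, imports as in the tests above):

handler = MasterHandler(EphemeralStore())
query = Query(namePrefix='', maxTokens=10)
assert len(handler.query(QueryRequest(queries=[query])).tokens) == 1

own_request = QueryAndOwnRequest(owner='some_owner',
                                 expirationTime=sys.maxint,
                                 query=query)
assert len(handler.query_and_own(own_request).tokens) == 0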
Code Example #9
File: analyzer_test.py Project: runt18/pinball
    def _add_active_workflow_tokens(self):
        """Add some active workflow tokens.

        The job dependencies form a complete binary tree turned upside down.
        I.e., each job has two parents.
        """
        self._store = EphemeralStore()
        version = 1
        for level in range(AnalyzerTestCase._NUM_LEVELS):
            jobs_at_level = 2 ** (AnalyzerTestCase._NUM_LEVELS - level - 1)
            for job_index in range(jobs_at_level):
                job_name = 'job_{0:d}_{1:d}'.format(level, job_index)
                event_name = Name(workflow='some_workflow',
                                  instance='123',
                                  job=job_name,
                                  event='some_event')
                if level == 0:
                    inputs = [Name.WORKFLOW_START_INPUT,
                              Name.WORKFLOW_START_INPUT + '_prime']
                    event_name.input = Name.WORKFLOW_START_INPUT
                else:
                    inputs = ['job_{0:d}_{1:d}'.format(level - 1, 2 * job_index),
                              'job_{0:d}_{1:d}'.format(level - 1, 2 * job_index + 1)]
                    event_name.input = 'job_{0:d}_{1:d}'.format(level - 1, 2 * job_index)
                if level == AnalyzerTestCase._NUM_LEVELS - 1:
                    outputs = []
                else:
                    outputs = ['job_{0:d}_{1:d}'.format(level + 1, job_index / 2)]
                job = ShellJob(name=job_name,
                               inputs=inputs,
                               outputs=outputs,
                               command='some_command')
                job.history.append(ExecutionRecord())
                name = Name(workflow='some_workflow', instance='123',
                            job_state=Name.WAITING_STATE, job=job_name)
                job_token = Token(version=version,
                                  name=name.get_job_token_name(),
                                  priority=10,
                                  data=pickle.dumps(job))
                version += 1
                event = Event('some_event')
                event_token = Token(version=version,
                                    name=event_name.get_event_token_name(),
                                    priority=10,
                                    data=pickle.dumps(event))
                self._store.commit_tokens([job_token, event_token])
Code Example #10
    def test_run(self, load_path_mock):
        config_parser = mock.Mock()
        load_path_mock.return_value = config_parser
        name = Name(workflow='some_workflow',
                    instance='123',
                    job_state=Name.WAITING_STATE,
                    job='some_job')
        config_parser.get_workflow_tokens.return_value = [
            Token(name=name.get_job_token_name())
        ]

        schedule = WorkflowSchedule(workflow='some_workflow')
        store = EphemeralStore()
        emailer = Emailer('some_host', '8080')
        request = schedule.run(emailer, store)
        self.assertEqual(load_path_mock.call_args_list, [
            mock.call('pinball_ext.workflow.parser.PyWorkflowParser', {},
                      'schedule')
        ])

        self.assertEqual(1, len(request.updates))
Code Example #11
    def test_run(self, load_path_mock):
        config_parser = mock.Mock()

        def load_path(params):
            self.assertEqual([], params.keys())
            return config_parser

        load_path_mock.return_value = load_path
        name = Name(workflow='some_workflow',
                    instance='123',
                    job_state=Name.WAITING_STATE,
                    job='some_job')
        config_parser.get_workflow_tokens.return_value = [
            Token(name=name.get_job_token_name())
        ]

        schedule = WorkflowSchedule(workflow='some_workflow')
        store = EphemeralStore()
        emailer = Emailer('some_host', '8080')
        request = schedule.run(emailer, store)

        self.assertEqual(1, len(request.updates))
Code Example #12
class AnalyzerTestCase(unittest.TestCase):
    _NUM_LEVELS = 3

    def setUp(self):
        self._store = EphemeralStore()

    def _add_active_workflow_tokens(self):
        """Add some active workflow tokens.

        The job dependencies form a complete binary tree turned upside down.
        I.e., each job has two parents.
        """
        self._store = EphemeralStore()
        version = 1
        for level in range(AnalyzerTestCase._NUM_LEVELS):
            jobs_at_level = 2**(AnalyzerTestCase._NUM_LEVELS - level - 1)
            for job_index in range(jobs_at_level):
                job_name = 'job_%d_%d' % (level, job_index)
                event_name = Name(workflow='some_workflow',
                                  instance='123',
                                  job=job_name,
                                  event='some_event')
                if level == 0:
                    inputs = [
                        Name.WORKFLOW_START_INPUT,
                        Name.WORKFLOW_START_INPUT + '_prime'
                    ]
                    event_name.input = Name.WORKFLOW_START_INPUT
                else:
                    inputs = [
                        'job_%d_%d' % (level - 1, 2 * job_index),
                        'job_%d_%d' % (level - 1, 2 * job_index + 1)
                    ]
                    event_name.input = 'job_%d_%d' % (level - 1, 2 * job_index)
                if level == AnalyzerTestCase._NUM_LEVELS - 1:
                    outputs = []
                else:
                    outputs = ['job_%d_%d' % (level + 1, job_index / 2)]
                job = ShellJob(name=job_name,
                               inputs=inputs,
                               outputs=outputs,
                               command='some_command')
                job.history.append(ExecutionRecord())
                name = Name(workflow='some_workflow',
                            instance='123',
                            job_state=Name.WAITING_STATE,
                            job=job_name)
                job_token = Token(version=version,
                                  name=name.get_job_token_name(),
                                  priority=10,
                                  data=pickle.dumps(job))
                version += 1
                event = Event('some_event')
                event_token = Token(version=version,
                                    name=event_name.get_event_token_name(),
                                    priority=10,
                                    data=pickle.dumps(event))
                self._store.commit_tokens([job_token, event_token])

    def _archive_tokens(self):
        tokens = self._store.read_active_tokens()
        self._store.archive_tokens(tokens)
        return tokens

    def _simulate(self):
        """Simulate execution of active jobs."""
        tokens = self._store.read_tokens()
        satisfied_deps = set()
        executed_jobs = []
        jobs = {}
        for token in tokens:
            event_name = Name.from_event_token_name(token.name)
            if event_name.event:
                satisfied_deps.add((event_name.input, event_name.job))
            else:
                job_name = Name.from_job_token_name(token.name)
                if job_name.job:
                    job = pickle.loads(token.data)
                    jobs[job.name] = job
        dep_counts = collections.defaultdict(int)
        while satisfied_deps:
            last_satisfied_deps = satisfied_deps
            satisfied_deps = set()
            for (_, job_name) in last_satisfied_deps:
                dep_counts[job_name] += 1
                if dep_counts[job_name] == 2:
                    executed_jobs.append(job_name)
                    job = jobs[job_name]
                    for output in job.outputs:
                        satisfied_deps.add((job_name, output))
        return executed_jobs

    def test_change_instance(self):
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer.change_instance('321')
        tokens = analyzer.get_tokens()
        self.assertLess(0, len(tokens))
        for token in tokens:
            name = Name.from_job_token_name(token.name)
            self.assertEqual('321', name.instance)

    def test_change_job_histories(self):
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer.clear_job_histories()
        tokens = analyzer.get_tokens()
        self.assertLess(0, len(tokens))
        for token in tokens:
            job = pickle.loads(token.data)
            self.assertEqual([], job.history)

    def test_poison_no_tokens(self):
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer.poison([])

    def test_poison_no_roots(self):
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer.poison([])
        tokens = analyzer.get_tokens()
        self._store.commit_tokens(updates=tokens)
        executed_jobs = self._simulate()
        self.assertEqual([], executed_jobs)

    def test_poison_all(self):
        """Poison all top level jobs."""
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')

        roots = []
        for job_index in range(0, 2**(AnalyzerTestCase._NUM_LEVELS - 1)):
            roots.append('job_0_%d' % job_index)

        analyzer.poison(roots)
        tokens = analyzer.get_tokens()
        self._store.commit_tokens(updates=tokens)
        executed_jobs = self._simulate()
        # We expect that every job has run.
        expected_num_executed_jobs = 2**(AnalyzerTestCase._NUM_LEVELS) - 1
        self.assertEqual(expected_num_executed_jobs, len(executed_jobs))

    def test_poison_subset(self):
        """Poison every second top level job."""
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')

        roots = []
        for job_index in range(0, 2**(AnalyzerTestCase._NUM_LEVELS - 1), 2):
            roots.append('job_0_%d' % job_index)

        analyzer.poison(roots)
        tokens = analyzer.get_tokens()
        self._store.commit_tokens(updates=tokens)
        executed_jobs = self._simulate()
        # We expect that every second job at the top level and every job at
        # a lower level was run.
        expected_num_executed_jobs = (
            2**(AnalyzerTestCase._NUM_LEVELS - 1) - 1 +
            2**(AnalyzerTestCase._NUM_LEVELS - 1) / 2)
        self.assertEqual(expected_num_executed_jobs, len(executed_jobs))

    def test_poison_get_new_event_tokens(self):
        """Poison all top level jobs and get new event tokens."""
        self._add_active_workflow_tokens()
        tokens = self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer._filter_event_tokens(tokens)

        roots = []
        for job_index in range(0, 2**(AnalyzerTestCase._NUM_LEVELS - 1)):
            roots.append('job_0_%d' % job_index)

        analyzer.poison(roots)
        tokens = analyzer.get_new_event_tokens()
        expected_num_new_event_tokens = 2**AnalyzerTestCase._NUM_LEVELS
        self.assertEqual(expected_num_new_event_tokens, len(tokens))
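The expected counts in the poison tests follow from _NUM_LEVELS = 3 together with the closure semantics the assertions imply (poisoning a job re-runs it and everything downstream of it). Worked out:

# The three levels hold 2**2, 2**1 and 2**0 jobs: 4 + 2 + 1 = 7 in total.
# test_poison_all poisons all 4 top-level jobs, so every job re-runs:
#     2**_NUM_LEVELS - 1 = 2**3 - 1 = 7 executed jobs.
# test_poison_subset poisons roots 0 and 2, whose downstream closures cover
# both level-1 jobs and the single level-2 job, plus the 2 poisoned roots:
#     (2**2 - 1) + 2**2 / 2 = 3 + 2 = 5 executed jobs.
# test_poison_get_new_event_tokens expects one event per input of the 4
# top-level jobs, each of which has two inputs:
#     2**_NUM_LEVELS = 2**3 = 8 new event tokens.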
Code Example #13
File: worker_test.py Project: Betterment/pinball
class WorkerTestCase(unittest.TestCase):
    def setUp(self):
        self._factory = Factory()
        self._store = EphemeralStore()
        self._factory.create_master(self._store)
        self._emailer = mock.Mock()
        self._worker = Worker(self._factory.get_client(), self._store,
                              self._emailer)
        self._client = self._factory.get_client()

    def _get_parent_job_token(self):
        name = Name(workflow='some_workflow',
                    instance='12345',
                    job_state=Name.WAITING_STATE,
                    job='parent_job')
        job = ShellJob(name=name.job,
                       inputs=[Name.WORKFLOW_START_INPUT],
                       outputs=['child_job'],
                       command='echo parent',
                       emails=['*****@*****.**'])
        return Token(name=name.get_job_token_name(), data=pickle.dumps(job))

    def _get_child_job_token(self):
        name = Name(workflow='some_workflow',
                    instance='12345',
                    job_state=Name.WAITING_STATE,
                    job='child_job')
        job = ShellJob(name=name.job,
                       inputs=['parent_job'],
                       outputs=[],
                       command='echo child',
                       emails=['*****@*****.**'])
        return Token(name=name.get_job_token_name(), data=pickle.dumps(job))

    def _post_job_tokens(self):
        """Add waiting job tokens to the master."""
        request = ModifyRequest(updates=[])
        request.updates.append(self._get_parent_job_token())
        request.updates.append(self._get_child_job_token())
        self._client.modify(request)

    def _post_workflow_start_event_token(self):
        name = Name(workflow='some_workflow',
                    instance='12345',
                    job='parent_job',
                    input_name=Name.WORKFLOW_START_INPUT,
                    event='workflow_start_event')
        event = Event(creator='SimpleWorkflowTest')
        token = Token(name=name.get_event_token_name(),
                      data=pickle.dumps(event))
        request = ModifyRequest(updates=[token])
        self._client.modify(request)

    def _verify_token_names(self, names):
        request = GroupRequest(namePrefix='/workflow/')
        response = self._client.group(request)
        names = sorted(names)
        counts = sorted(response.counts.keys())
        self.assertEqual(names, counts)

    def _verify_archived_token_names(self, names):
        active_tokens = self._store.read_active_tokens()
        all_tokens = self._store.read_tokens()
        archived_token_names = []
        for token in all_tokens:
            if token not in active_tokens:
                archived_token_names.append(token.name)
        names = sorted(names)
        archived_token_names = sorted(archived_token_names)
        self.assertEqual(names, archived_token_names)

    def _get_token(self, name):
        query = Query(namePrefix=name)
        request = QueryRequest(queries=[query])
        response = self._client.query(request)
        self.assertEqual(1, len(response.tokens))
        self.assertEqual(1, len(response.tokens[0]))
        return response.tokens[0][0]

    def _get_stored_token(self, name):
        tokens = self._store.read_tokens(name_prefix=name)
        self.assertEqual(1, len(tokens))
        return tokens[0]

    def _verify_parent_job_waiting(self):
        token_names = [
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name(),
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='child_job').get_job_token_name(),
            Name(workflow='some_workflow',
                 instance='12345',
                 job='parent_job',
                 input_name=Name.WORKFLOW_START_INPUT,
                 event='workflow_start_event').get_event_token_name()]
        self._verify_token_names(token_names)

    def _verify_parent_job_runnable(self):
        token_names = [Name(workflow='some_workflow',
                            instance='12345',
                            job_state=Name.RUNNABLE_STATE,
                            job='parent_job').get_job_token_name(),
                       Name(workflow='some_workflow',
                            instance='12345',
                            job_state=Name.WAITING_STATE,
                            job='child_job').get_job_token_name()]
        self._verify_token_names(token_names)

    def test_get_triggering_events(self):
        self.assertEqual([], Worker._get_triggering_events([]))

        self.assertEqual(['a'], Worker._get_triggering_events([['a']]))

        events = Worker._get_triggering_events([['a', 'b']])
        self.assertTrue(events == ['a'] or events == ['b'])

        events = Worker._get_triggering_events([['a', 'b'], ['1', '2']])
        self.assertTrue(events == ['a', '1'] or events == ['a', '2'] or
                        events == ['b', '1'] or events == ['b', '2'])

    def test_move_job_token_to_runnable(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        job_name = Name(workflow='some_workflow',
                        instance='12345',
                        job_state=Name.WAITING_STATE,
                        job='parent_job')
        job_token = self._get_token(job_name.get_job_token_name())
        event_name = Name(workflow='some_workflow',
                          instance='12345',
                          job='parent_job',
                          input_name=Name.WORKFLOW_START_INPUT,
                          event='workflow_start_event')
        event_token = self._get_token(event_name.get_event_token_name())
        self._worker._move_job_token_to_runnable(job_token, [event_token])
        # Event token should have been removed and the parent job should be
        # runnable.
        self._verify_parent_job_runnable()

    def test_make_job_runnable(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        parent_job_name = Name(workflow='some_workflow',
                               instance='12345',
                               job_state=Name.WAITING_STATE,
                               job='parent_job').get_job_token_name()
        child_job_name = Name(workflow='some_workflow',
                              instance='12345',
                              job_state=Name.WAITING_STATE,
                              job='child_job').get_job_token_name()

        parent_job_token = self._get_token(parent_job_name)
        child_job_token = self._get_token(child_job_name)

        self._worker._make_job_runnable(child_job_token)
        # Child job is missing triggering tokens so it cannot be made runnable.
        self._verify_parent_job_waiting()

        self._worker._make_job_runnable(parent_job_token)
        # Parent job has all triggering tokens so it can be made runnable.
        self._verify_parent_job_runnable()

    def test_make_runnable(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        self._worker._make_runnable('some_other_workflow', '12345')
        # Workflow instance does not exist so nothing should have changed.
        self._verify_parent_job_waiting()

        self._worker._make_runnable('some_workflow', 'some_other_instance')
        # Workflow instance does not exist so nothing should have changed.
        self._verify_parent_job_waiting()

        self._worker._make_runnable('some_workflow', '12345')
        self._verify_parent_job_runnable()

    def test_own_runnable_job_token(self):
        self._post_job_tokens()

        self._worker._own_runnable_job_token()
        # Event token is not present so nothing should have changed.
        token_names = [Name(workflow='some_workflow',
                            instance='12345',
                            job_state=Name.WAITING_STATE,
                            job='parent_job').get_job_token_name(),
                       Name(workflow='some_workflow',
                            instance='12345',
                            job_state=Name.WAITING_STATE,
                            job='child_job').get_job_token_name()]
        self._verify_token_names(token_names)
        self.assertIsNone(self._worker._owned_job_token)

        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        # Worker should now own a runnable job token.
        self._verify_parent_job_runnable()
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.RUNNABLE_STATE,
                 job='parent_job').get_job_token_name())
        self.assertEqual(parent_token, self._worker._owned_job_token)

    def _add_history_to_owned_token(self):
        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)
        self._worker._owned_job_token.data = pickle.dumps(job)

    def test_get_output_event_tokens(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        self.assertIsNotNone(self._worker._owned_job_token)

        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)

        event_tokens = self._worker._get_output_event_tokens(job)
        self.assertEqual(1, len(event_tokens))
        event_token_name = Name.from_event_token_name(event_tokens[0].name)
        expected_prefix = Name(workflow='some_workflow',
                               instance='12345',
                               job='child_job',
                               input_name='parent_job').get_input_prefix()
        self.assertEqual(expected_prefix, event_token_name.get_input_prefix())

    def test_move_job_token_to_waiting(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)
        self._worker._owned_job_token.data = pickle.dumps(job)

        self._worker._move_job_token_to_waiting(job, True)

        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        self.assertEqual(execution_record.start_time,
                         job.history[0].start_time)

    def test_keep_job_token_in_runnable(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        job.history.append('some_historic_record')

        self._worker._keep_job_token_in_runnable(job)

        self._verify_parent_job_runnable()
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.RUNNABLE_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        self.assertEqual('some_historic_record', job.history[0])

    @staticmethod
    def _from_job(workflow, instance, job_name, job, data_builder, emailer):
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        executed_job = copy.copy(job)
        executed_job.history.append(execution_record)
        job_executor = mock.Mock()
        job_executor.job = executed_job
        job_executor.prepare.return_value = True
        job_executor.execute.return_value = True
        return job_executor

    @mock.patch('pinball.workflow.worker.JobExecutor')
    def test_execute_job(self, job_executor_mock):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        job_executor = mock.Mock()
        job_executor_mock.from_job.return_value = job_executor

        job_executor_mock.from_job.side_effect = WorkerTestCase._from_job

        self._worker._execute_job()

        self.assertIsNone(self._worker._owned_job_token)
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

    def test_send_instance_end_email(self):
        data_builder = mock.Mock()
        self._worker._data_builder = data_builder

        schedule_data = mock.Mock()
        schedule_data.emails = ['*****@*****.**']
        data_builder.get_schedule.return_value = schedule_data

        instance_data = mock.Mock()
        data_builder.get_instance.return_value = instance_data

        job_data = mock.Mock()
        data_builder.get_jobs.return_value = [job_data]

        self._worker._send_instance_end_email('some_workflow', '12345')

        self._emailer.send_instance_end_message.assert_called_once_with(
            ['*****@*****.**'], instance_data, [job_data])

    def test_send_job_failure_emails(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        job.history.append('some_historic_record')
        executor = mock.Mock()
        self._worker._executor = executor
        executor.job = job

        data_builder = mock.Mock()
        self._worker._data_builder = data_builder

        schedule_data = mock.Mock()
        schedule_data.emails = ['*****@*****.**']
        data_builder.get_schedule.return_value = schedule_data

        execution_data = mock.Mock()
        data_builder.get_execution.return_value = execution_data

        self._worker._send_job_failure_emails(True)

        self._emailer.send_job_execution_end_message.assert_any_call(
            ['*****@*****.**',
             '*****@*****.**'], execution_data)

    @mock.patch('pinball.workflow.worker.JobExecutor')
    def test_run(self, job_executor_mock):
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        job_executor_mock.from_job.side_effect = WorkerTestCase._from_job

        self._worker._test_only_end_if_no_runnable = True
        self._worker.run()
        with mock.patch('pinball.workflow.archiver.time') as time_patch:
            # add one day
            time_patch.time.return_value = time.time() + 24 * 60 * 60
            self._worker.run()

        parent_job_token_name = Name(workflow='some_workflow',
                                     instance='12345',
                                     job_state=Name.WAITING_STATE,
                                     job='parent_job').get_job_token_name()
        child_job_token_name = Name(workflow='some_workflow',
                                    instance='12345',
                                    job_state=Name.WAITING_STATE,
                                    job='child_job').get_job_token_name()
        signal_string = Signal.action_to_string(Signal.ARCHIVE)
        signal_token_name = Name(workflow='some_workflow',
                                 instance='12345',
                                 signal=signal_string).get_signal_token_name()

        token_names = [parent_job_token_name,
                       child_job_token_name,
                       signal_token_name]
        self._verify_archived_token_names(token_names)

        self.assertEqual(2, job_executor_mock.from_job.call_count)

        parent_token = self._get_stored_token(parent_job_token_name)
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

        child_token = self._get_stored_token(child_job_token_name)
        job = pickle.loads(child_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

        signal_token = self._get_stored_token(signal_token_name)
        signal = pickle.loads(signal_token.data)
        self.assertEqual(Signal.ARCHIVE, signal.action)
Code Example #14
File: worker_test.py Project: yonglehou/pinball
class WorkerTestCase(unittest.TestCase):
    def setUp(self):
        self._factory = Factory()
        self._store = EphemeralStore()
        self._factory.create_master(self._store)
        self._emailer = mock.Mock()
        self._worker = Worker(self._factory.get_client(), self._store,
                              self._emailer)
        self._client = self._factory.get_client()

    def _get_parent_job_token(self):
        name = Name(workflow='some_workflow',
                    instance='12345',
                    job_state=Name.WAITING_STATE,
                    job='parent_job')
        job = ShellJob(name=name.job,
                       inputs=[Name.WORKFLOW_START_INPUT],
                       outputs=['child_job'],
                       command='echo parent',
                       emails=['*****@*****.**'])
        return Token(name=name.get_job_token_name(), data=pickle.dumps(job))

    def _get_child_job_token(self):
        name = Name(workflow='some_workflow',
                    instance='12345',
                    job_state=Name.WAITING_STATE,
                    job='child_job')
        job = ShellJob(name=name.job,
                       inputs=['parent_job'],
                       outputs=[],
                       command='echo child',
                       emails=['*****@*****.**'])
        return Token(name=name.get_job_token_name(), data=pickle.dumps(job))

    def _post_job_tokens(self):
        """Add waiting job tokens to the master."""
        request = ModifyRequest(updates=[])
        request.updates.append(self._get_parent_job_token())
        request.updates.append(self._get_child_job_token())
        self._client.modify(request)

    def _post_workflow_start_event_token(self):
        name = Name(workflow='some_workflow',
                    instance='12345',
                    job='parent_job',
                    input_name=Name.WORKFLOW_START_INPUT,
                    event='workflow_start_event')
        event = Event(creator='SimpleWorkflowTest')
        token = Token(name=name.get_event_token_name(),
                      data=pickle.dumps(event))
        request = ModifyRequest(updates=[token])
        self._client.modify(request)

    def _verify_token_names(self, names):
        request = GroupRequest(namePrefix='/workflow/')
        response = self._client.group(request)
        names = sorted(names)
        counts = sorted(response.counts.keys())
        self.assertEqual(names, counts)

    def _verify_archived_token_names(self, names):
        active_tokens = self._store.read_active_tokens()
        all_tokens = self._store.read_tokens()
        archived_token_names = []
        for token in all_tokens:
            if token not in active_tokens:
                archived_token_names.append(token.name)
        names = sorted(names)
        archived_token_names = sorted(archived_token_names)
        self.assertEqual(names, archived_token_names)

    def _get_token(self, name):
        query = Query(namePrefix=name)
        request = QueryRequest(queries=[query])
        response = self._client.query(request)
        self.assertEqual(1, len(response.tokens))
        self.assertEqual(1, len(response.tokens[0]))
        return response.tokens[0][0]

    def _get_stored_token(self, name):
        tokens = self._store.read_tokens(name_prefix=name)
        self.assertEqual(1, len(tokens))
        return tokens[0]

    def _verify_parent_job_waiting(self):
        token_names = [
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name(),
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='child_job').get_job_token_name(),
            Name(workflow='some_workflow',
                 instance='12345',
                 job='parent_job',
                 input_name=Name.WORKFLOW_START_INPUT,
                 event='workflow_start_event').get_event_token_name()
        ]
        self._verify_token_names(token_names)

    def _verify_parent_job_runnable(self):
        token_names = [
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.RUNNABLE_STATE,
                 job='parent_job').get_job_token_name(),
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='child_job').get_job_token_name()
        ]
        self._verify_token_names(token_names)

    def test_get_triggering_events(self):
        self.assertEqual([], Worker._get_triggering_events([]))

        self.assertEqual(['a'], Worker._get_triggering_events([['a']]))

        events = Worker._get_triggering_events([['a', 'b']])
        self.assertTrue(events == ['a'] or events == ['b'])

        events = Worker._get_triggering_events([['a', 'b'], ['1', '2']])
        self.assertTrue(events == ['a', '1'] or events == ['a', '2']
                        or events == ['b', '1'] or events == ['b', '2'])

    def test_move_job_token_to_runnable(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        job_name = Name(workflow='some_workflow',
                        instance='12345',
                        job_state=Name.WAITING_STATE,
                        job='parent_job')
        job_token = self._get_token(job_name.get_job_token_name())
        event_name = Name(workflow='some_workflow',
                          instance='12345',
                          job='parent_job',
                          input_name=Name.WORKFLOW_START_INPUT,
                          event='workflow_start_event')
        event_token = self._get_token(event_name.get_event_token_name())
        self._worker._move_job_token_to_runnable(job_token, [event_token])
        # Event token should have been removed and the parent job should be
        # runnable.
        self._verify_parent_job_runnable()

    def test_make_job_runnable(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        parent_job_name = Name(workflow='some_workflow',
                               instance='12345',
                               job_state=Name.WAITING_STATE,
                               job='parent_job').get_job_token_name()
        child_job_name = Name(workflow='some_workflow',
                              instance='12345',
                              job_state=Name.WAITING_STATE,
                              job='child_job').get_job_token_name()

        parent_job_token = self._get_token(parent_job_name)
        child_job_token = self._get_token(child_job_name)

        self._worker._make_job_runnable(child_job_token)
        # Child job is missing triggering tokens so it cannot be made runnable.
        self._verify_parent_job_waiting()

        self._worker._make_job_runnable(parent_job_token)
        # Parent job has all triggering tokens so it can be made runnable.
        self._verify_parent_job_runnable()

    def test_make_runnable(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        self._worker._make_runnable('some_other_workflow', '12345')
        # Workflow instance does not exist so nothing should have changed.
        self._verify_parent_job_waiting()

        self._worker._make_runnable('some_workflow', 'some_other_instance')
        # Workflow instance does not exist so nothing should have changed.
        self._verify_parent_job_waiting()

        self._worker._make_runnable('some_workflow', '12345')
        self._verify_parent_job_runnable()

    def test_own_runnable_job_token(self):
        self._post_job_tokens()

        self._worker._own_runnable_job_token()
        # Event token is not present so nothing should have changed.
        token_names = [
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name(),
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='child_job').get_job_token_name()
        ]
        self._verify_token_names(token_names)
        self.assertIsNone(self._worker._owned_job_token)

        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        # Worker should now own a runnable job token.
        self._verify_parent_job_runnable()
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.RUNNABLE_STATE,
                 job='parent_job').get_job_token_name())
        self.assertEqual(parent_token, self._worker._owned_job_token)

    def _add_history_to_owned_token(self):
        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)
        self._worker._owned_job_token.data = pickle.dumps(job)

    def test_get_output_event_tokens(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        self.assertIsNotNone(self._worker._owned_job_token)

        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)

        event_tokens = self._worker._get_output_event_tokens(job)
        self.assertEqual(1, len(event_tokens))
        event_token_name = Name.from_event_token_name(event_tokens[0].name)
        expected_prefix = Name(workflow='some_workflow',
                               instance='12345',
                               job='child_job',
                               input_name='parent_job').get_input_prefix()
        self.assertEqual(expected_prefix, event_token_name.get_input_prefix())

    def test_move_job_token_to_waiting(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)
        self._worker._owned_job_token.data = pickle.dumps(job)

        self._worker._move_job_token_to_waiting(job, True)

        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        self.assertEqual(execution_record.start_time,
                         job.history[0].start_time)

    def test_keep_job_token_in_runnable(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        job.history.append('some_historic_record')

        self._worker._keep_job_token_in_runnable(job)

        self._verify_parent_job_runnable()
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.RUNNABLE_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        self.assertEqual('some_historic_record', job.history[0])

    @staticmethod
    def _from_job(workflow, instance, job_name, job, data_builder, emailer):
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        executed_job = copy.copy(job)
        executed_job.history.append(execution_record)
        job_executor = mock.Mock()
        job_executor.job = executed_job
        job_executor.prepare.return_value = True
        job_executor.execute.return_value = True
        return job_executor

    @mock.patch('pinball.workflow.worker.JobExecutor')
    def test_execute_job(self, job_executor_mock):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        job_executor = mock.Mock()
        job_executor_mock.from_job.return_value = job_executor

        job_executor_mock.from_job.side_effect = WorkerTestCase._from_job

        self._worker._execute_job()

        self.assertIsNone(self._worker._owned_job_token)
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

    def test_send_instance_end_email(self):
        data_builder = mock.Mock()
        self._worker._data_builder = data_builder

        schedule_data = mock.Mock()
        schedule_data.emails = ['*****@*****.**']
        data_builder.get_schedule.return_value = schedule_data

        instance_data = mock.Mock()
        data_builder.get_instance.return_value = instance_data

        job_data = mock.Mock()
        data_builder.get_jobs.return_value = [job_data]

        self._worker._send_instance_end_email('some_workflow', '12345')

        self._emailer.send_instance_end_message.assert_called_once_with(
            ['*****@*****.**'], instance_data, [job_data])

    def test_send_job_failure_emails(self):
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        job.history.append('some_historic_record')
        executor = mock.Mock()
        self._worker._executor = executor
        executor.job = job

        data_builder = mock.Mock()
        self._worker._data_builder = data_builder

        schedule_data = mock.Mock()
        schedule_data.emails = ['*****@*****.**']
        data_builder.get_schedule.return_value = schedule_data

        execution_data = mock.Mock()
        data_builder.get_execution.return_value = execution_data

        self._worker._send_job_failure_emails(True)

        self._emailer.send_job_execution_end_message.assert_any_call(
            ['*****@*****.**', '*****@*****.**'],
            execution_data)

    @mock.patch('pinball.workflow.worker.JobExecutor')
    def test_run(self, job_executor_mock):
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        job_executor_mock.from_job.side_effect = WorkerTestCase._from_job

        self._worker._test_only_end_if_no_runnable = True
        self._worker.run()
        with mock.patch('pinball.workflow.archiver.time') as time_patch:
            # add one day
            time_patch.time.return_value = time.time() + 24 * 60 * 60
            self._worker.run()

        parent_job_token_name = Name(workflow='some_workflow',
                                     instance='12345',
                                     job_state=Name.WAITING_STATE,
                                     job='parent_job').get_job_token_name()
        child_job_token_name = Name(workflow='some_workflow',
                                    instance='12345',
                                    job_state=Name.WAITING_STATE,
                                    job='child_job').get_job_token_name()
        signal_string = Signal.action_to_string(Signal.ARCHIVE)
        signal_token_name = Name(workflow='some_workflow',
                                 instance='12345',
                                 signal=signal_string).get_signal_token_name()

        token_names = [
            parent_job_token_name, child_job_token_name, signal_token_name
        ]
        self._verify_archived_token_names(token_names)

        self.assertEqual(2, job_executor_mock.from_job.call_count)

        parent_token = self._get_stored_token(parent_job_token_name)
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

        child_token = self._get_stored_token(child_job_token_name)
        job = pickle.loads(child_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

        signal_token = self._get_stored_token(signal_token_name)
        signal = pickle.loads(signal_token.data)
        self.assertEqual(Signal.ARCHIVE, signal.action)
Code Example #15
 def setUp(self):
     self._store = EphemeralStore()
     self._data_builder = DataBuilder(self._store)
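The worker tests (Code Examples #13 and #14) only ever see a data builder through a mock, via get_schedule, get_instance, get_jobs and get_execution; this fixture is where the real object is built. A hedged sketch of how it might be queried; the method names come from those mocked calls, and the (workflow, instance) argument order is an assumption modeled on _send_instance_end_email('some_workflow', '12345'):

builder = DataBuilder(EphemeralStore())
# Hypothetical usage; the argument order is assumed, not confirmed here.
instance_data = builder.get_instance('some_workflow', '12345')
jobs_data = builder.get_jobs('some_workflow', '12345')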
Code Example #16
class TransactionTestCase(unittest.TestCase):
    def setUp(self):
        """Set up self._trie with 111 tokens, one of them a blessed version."""
        self._trie = pytrie.StringTrie()
        self._store = EphemeralStore()
        blessed_version = BlessedVersion(MasterHandler._BLESSED_VERSION,
                                         MasterHandler._MASTER_OWNER)
        for i in range(0, 10):
            some_token = Token(blessed_version.advance_version(),
                               '/some_dir/some_token_%d' % i,
                               priority=i,
                               data='some_data_%d' % i)
            self._trie[some_token.name] = some_token
            self._store.commit_tokens(updates=[some_token])
            for j in range(0, 10):
                some_other_token = Token(
                    blessed_version.advance_version(),
                    '/some_dir/some_token_%d/some_other_token_%d' % (i, j),
                    priority=j,
                    data='some_data_%d_%d' % (i, j))
                self._trie[some_other_token.name] = some_other_token
                self._store.commit_tokens(updates=[some_other_token])
        blessed_version.advance_version()
        self._trie[MasterHandler._BLESSED_VERSION] = blessed_version
        self._store.commit_tokens(updates=[blessed_version])
        self._check_version_uniqueness()

    def _check_version_uniqueness(self):
        """Check self._trie.values() have distinct version values."""
        versions = set()
        for token in self._trie.values():
            versions.add(token.version)
        self.assertEqual(len(self._trie), len(versions))

    def _get_blessed_version(self):
        return self._trie[MasterHandler._BLESSED_VERSION]

    # Archive tests.
    def test_archive_empty(self):
        request = ArchiveRequest()
        transaction = ArchiveTransaction()
        # Make sure that prepare and commit do not throw an exception.
        transaction.prepare(request)
        transaction.commit(self._trie, self._get_blessed_version(),
                           self._store)

    def test_archive(self):
        request = ArchiveRequest(tokens=[])
        n_tokens_before = len(self._trie)
        some_token = copy.copy(self._trie['/some_dir/some_token_0'])
        request.tokens.append(some_token)
        some_other_token = copy.copy(
            self._trie['/some_dir/some_token_0/some_other_token_0'])
        request.tokens.append(some_other_token)
        transaction = ArchiveTransaction()
        transaction.prepare(request)
        transaction.commit(self._trie, self._get_blessed_version(),
                           self._store)
        n_tokens_after = len(self._trie)
        # We deleted two things from self._trie.
        self.assertEqual(n_tokens_before - 2, n_tokens_after)
        n_active_tokens = len(self._store.read_active_tokens())
        self.assertEqual(n_tokens_after, n_active_tokens)
        n_all_tokens = len(self._store.read_tokens())
        self.assertEqual(n_tokens_before, n_all_tokens)

    # Group tests.
    def test_group_empty(self):
        request = GroupRequest()
        transaction = GroupTransaction()
        # Make sure that prepare and commit do not throw an exception.
        transaction.prepare(request)
        transaction.commit(self._trie, self._get_blessed_version(),
                           self._store)

    def test_group(self):
        request = GroupRequest()
        request.namePrefix = '/some_dir/'
        request.groupSuffix = '/'
        transaction = GroupTransaction()
        transaction.prepare(request)
        response = transaction.commit(self._trie, self._get_blessed_version(),
                                      self._store)

        expected_groups = set()
        for i in range(0, 10):
            expected_groups.add('/some_dir/some_token_%d' % i)
        groups = set()
        for group, count in response.counts.iteritems():
            groups.add(group)
            self.assertEqual(11, count)
        self.assertEqual(expected_groups, groups)

    # Modify tests.
    def test_modify_empty(self):
        request = ModifyRequest()
        transaction = ModifyTransaction()
        # Make sure that prepare and commit do not throw an exception.
        transaction.prepare(request)
        transaction.commit(self._trie, self._get_blessed_version(),
                           self._store)

    def test_modify_deletes(self):
        request = ModifyRequest(deletes=[])
        n_tokens_before = len(self._trie)
        some_token = copy.copy(self._trie['/some_dir/some_token_0'])
        request.deletes.append(some_token)
        some_other_token = copy.copy(
            self._trie['/some_dir/some_token_0/some_other_token_0'])
        request.deletes.append(some_other_token)
        transaction = ModifyTransaction()
        transaction.prepare(request)
        transaction.commit(self._trie, self._get_blessed_version(),
                           self._store)
        n_tokens_after = len(self._trie)
        # We deleted two things from self._trie.
        self.assertEqual(n_tokens_before - 2, n_tokens_after)
        self._check_version_uniqueness()

    def test_modify_updates(self):
        request = ModifyRequest(updates=[])
        n_tokens_before = len(self._trie)
        token = copy.copy(self._trie['/some_dir/some_token_0'])
        token.data = 'some other data'
        request.updates.append(token)
        new_token = Token(name='/some_other_dir/some_token', data='some data')
        request.updates.append(new_token)
        transaction = ModifyTransaction()
        transaction.prepare(request)
        response = transaction.commit(self._trie, self._get_blessed_version(),
                                      self._store)

        self.assertEqual(2, len(response.updates))
        self.assertNotEqual(token.version, response.updates[0].version)
        self.assertEqual(token.name, response.updates[0].name)
        self.assertEqual(token.data, response.updates[0].data)
        self.assertLess(0, response.updates[1].version)
        self.assertEqual(new_token.name, response.updates[1].name)
        self.assertEqual(new_token.data, response.updates[1].data)
        n_tokens_after = len(self._trie)
        self.assertEqual(n_tokens_before + 1, n_tokens_after)
        self._check_version_uniqueness()

    def test_modify_no_name_change(self):
        """Do not allow changing token names."""
        request = ModifyRequest(updates=[])
        # pickle exceeds the default recursion limit when serializing the
        # deeply nested trie. Raising the limit works around it.
        sys.setrecursionlimit(10000)
        trie_before = pickle.dumps(self._trie)
        token = copy.copy(self._trie['/some_dir/some_token_0'])
        token.name = '/some_other_dir/some_token_0'
        request.updates.append(token)
        transaction = ModifyTransaction()
        transaction.prepare(request)
        self.assertRaises(TokenMasterException, transaction.commit, self._trie,
                          self._get_blessed_version(), self._store)
        trie_after = pickle.dumps(self._trie)
        self.assertEqual(trie_before, trie_after)

    def test_modify_deletes_and_updates(self):
        """Updates and deletes in a single request."""
        request = ModifyRequest(updates=[], deletes=[])
        n_tokens_before = len(self._trie)
        delete_token = copy.copy(self._trie['/some_dir/some_token_0'])
        request.deletes.append(delete_token)
        update_token = copy.copy(self._trie['/some_dir/some_token_1'])
        update_token.data = 'some other data'
        request.updates.append(update_token)
        transaction = ModifyTransaction()
        transaction.prepare(request)
        response = transaction.commit(self._trie, self._get_blessed_version(),
                                      self._store)

        self.assertEqual(1, len(response.updates))
        n_tokens_after = len(self._trie)
        self.assertEqual(n_tokens_before - 1, n_tokens_after)
        self._check_version_uniqueness()

    # Query tests.
    def test_query_empty(self):
        request = QueryRequest()
        transaction = QueryTransaction()
        # Make sure that prepare and commit do not throw an exception.
        transaction.prepare(request)
        transaction.commit(self._trie, self._get_blessed_version(),
                           self._store)

    def test_query(self):
        some_query = Query()
        some_query.namePrefix = '/some_dir'
        some_query.maxTokens = 10
        some_other_query = Query()
        some_other_query.namePrefix = '/some_dir/some_token_0'
        some_other_query.maxTokens = 100
        request = QueryRequest()
        request.queries = [some_query, some_other_query]
        transaction = QueryTransaction()
        transaction.prepare(request)
        response = transaction.commit(self._trie, self._get_blessed_version(),
                                      self._store)
        self.assertEqual(2, len(response.tokens))
        self.assertEqual(10, len(response.tokens[0]))
        for token in response.tokens[0]:
            self.assertTrue(token.name.startswith('/some_dir'))
            self.assertEqual(9, token.priority)
        self.assertEqual(11, len(response.tokens[1]))
        for token in response.tokens[1]:
            self.assertTrue(token.name.startswith('/some_dir/some_token_0'))

    # Query and own tests.
    def test_query_and_own_empty(self):
        request = QueryAndOwnRequest()
        transaction = QueryAndOwnTransaction()
        # Make sure that prepare and commit do not throw an exception.
        transaction.prepare(request)
        transaction.commit(self._trie, self._get_blessed_version(),
                           self._store)

    def test_query_and_own(self):
        some_token = self._trie['/some_dir/some_token_0']
        some_token.owner = 'some_owner'
        some_token.expirationTime = 10  # in the past
        some_token = self._trie['/some_dir/some_token_1']
        some_token.owner = 'some_owner'
        some_token.expirationTime = sys.maxint  # in the future
        some_query = Query()
        some_query.namePrefix = ''
        some_query.maxTokens = 200
        request = QueryAndOwnRequest()
        request.owner = 'some_other_owner'
        request.expirationTime = sys.maxint
        request.query = some_query
        transaction = QueryAndOwnTransaction()
        transaction.prepare(request)
        response = transaction.commit(self._trie, self._get_blessed_version(),
                                      self._store)

        # Should have owned all tokens but two: the blessed version and the one
        # token that is already owned.
        self.assertEqual(len(self._trie) - 2, len(response.tokens))
        for token in response.tokens:
            self.assertEqual('some_other_owner', token.owner)
            self.assertEqual(sys.maxint, token.expirationTime)
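
Every test in TransactionTestCase follows the same two-phase shape: build a
request, prepare() it, then commit() against the trie, the blessed version,
and the store. A condensed sketch of one such round trip, assuming the test
module's imports (pytrie, EphemeralStore, BlessedVersion, MasterHandler,
Token, ModifyRequest, ModifyTransaction):

trie = pytrie.StringTrie()
store = EphemeralStore()
blessed_version = BlessedVersion(MasterHandler._BLESSED_VERSION,
                                 MasterHandler._MASTER_OWNER)
trie[MasterHandler._BLESSED_VERSION] = blessed_version

# Insert one brand new token through a modify transaction.
request = ModifyRequest(updates=[Token(name='/some_dir/some_token',
                                       data='some_data')])
transaction = ModifyTransaction()
transaction.prepare(request)
response = transaction.commit(trie, blessed_version, store)

# The committed copy carries a fresh version assigned by the blessed version.
assert response.updates[0].version > 0
assert '/some_dir/some_token' in trie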
Code example #18
0
class DataBuilderTestCase(unittest.TestCase):
    def setUp(self):
        self._store = EphemeralStore()
        self._data_builder = DataBuilder(self._store)

    @mock.patch('os.makedirs')
    @mock.patch('__builtin__.open')
    def _add_tokens(self, _, __):
        generate_workflows(2, 2, 2, 2, 2, self._store)

    def test_get_workflows_empty(self):
        self.assertEqual([], self._data_builder.get_workflows())

    def _get_workflows(self):
        self._add_tokens()
        workflows = self._data_builder.get_workflows()
        self.assertEqual(4, len(workflows))
        workflow_status = {'workflow_0': Status.RUNNING,
                           'workflow_1': Status.RUNNING,
                           'workflow_2': Status.SUCCESS,
                           'workflow_3': Status.FAILURE}
        for workflow in workflows:
            self.assertEqual(workflow_status[workflow.workflow],
                             workflow.status)
            self.assertEqual('instance_1', workflow.last_instance)
            del workflow_status[workflow.workflow]
        self.assertEqual({}, workflow_status)

    def test_get_workflows(self):
        self._get_workflows()

    def test_get_workflows_using_cache(self):
        self._data_builder.use_cache = True
        self._get_workflows()
        # Only finished (archived) workflow instances should have been cached.
        expected_cached_names = ['/workflow/workflow_2/instance_0/',
                                 '/workflow/workflow_2/instance_1/',
                                 '/workflow/workflow_3/instance_0/',
                                 '/workflow/workflow_3/instance_1/']
        cached_names = sorted(self._store.read_cached_data_names())
        self.assertEqual(expected_cached_names, cached_names)

    def test_get_workflow_empty(self):
        self.assertIsNone(self._data_builder.get_workflow('does_not_exist'))

    def _get_workflow(self):
        self._add_tokens()
        workflow = self._data_builder.get_workflow('workflow_0')
        self.assertEqual('workflow_0', workflow.workflow)
        self.assertEqual(Status.RUNNING, workflow.status)
        self.assertEqual('instance_1', workflow.last_instance)

    def test_get_workflow(self):
        self._get_workflow()

    def test_get_workflow_using_cache(self):
        self._data_builder.use_cache = True
        self._get_workflow()
        # Instances of a running workflow should not have been cached.
        self.assertEqual([], self._store.read_cached_data_names())

    def test_get_instances_empty(self):
        self.assertEqual([],
                         self._data_builder.get_instances('does_not_exist'))

    def _get_instances(self):
        self._add_tokens()
        instances = self._data_builder.get_instances('workflow_2')
        self.assertEqual(2, len(instances))
        instance_status = [Status.SUCCESS, Status.FAILURE]
        for instance in instances:
            self.assertEqual('workflow_2', instance.workflow)
            instance_status.remove(instance.status)
        self.assertEqual([], instance_status)

    def test_get_instances(self):
        self._get_instances()

    def test_get_instances_using_cache(self):
        self._data_builder.use_cache = True
        self._get_instances()
        expected_cached_names = ['/workflow/workflow_2/instance_0/',
                                 '/workflow/workflow_2/instance_1/']
        cached_names = sorted(self._store.read_cached_data_names())
        self.assertEqual(expected_cached_names, cached_names)

    def test_get_instance_empty(self):
        self.assertIsNone(
            self._data_builder.get_instance('does_not_exist', 'instance_0'))

    def _get_instance(self):
        self._add_tokens()
        instance = self._data_builder.get_instance('workflow_0', 'instance_0')
        self.assertEqual('workflow_0', instance.workflow)
        self.assertEqual('instance_0', instance.instance)

    def test_get_instance(self):
        self._get_instance()

    def test_get_instance_using_cache(self):
        self._data_builder.use_cache = True
        self._get_instance()
        # Running instance should not have been cached.
        self.assertEqual([], self._store.read_cached_data_names())

    def test_get_jobs_empty(self):
        self.assertEqual([],
                         self._data_builder.get_jobs('does_not_exist',
                                                     'does_not_exist'))

    def test_get_jobs(self):
        self._add_tokens()
        jobs = self._data_builder.get_jobs('workflow_0', 'instance_0')
        self.assertEqual(2, len(jobs))
        for job in jobs:
            self.assertEqual('workflow_0', job.workflow)
            self.assertEqual('instance_0', job.instance)
            self.assertEqual('ShellJob', job.job_type)
            self.assertTrue(job.info.startswith('command=some command'))
            self.assertEqual(Status.FAILURE, job.status)
        self.assertEqual([(0, ''), (1, 'SUCCESS'), (9, 'FAILURE')],
                         jobs[0].progress)
        self.assertEqual([(89, ''), (1, 'SUCCESS'), (9, 'FAILURE')],
                         jobs[1].progress)

    def test_get_executions_empty(self):
        self.assertEqual([],
                         self._data_builder.get_executions('does_not_exist',
                                                           'does_not_exist',
                                                           'does_not_exist'))

    def test_get_executions(self):
        self._add_tokens()
        executions = self._data_builder.get_executions('workflow_0',
                                                       'instance_0',
                                                       'job_0')
        self.assertEqual(2, len(executions))
        exit_codes = [0, 1]
        for execution in executions:
            self.assertEqual('workflow_0', execution.workflow)
            self.assertEqual('instance_0', execution.instance)
            self.assertEqual('job_0', execution.job)
            self.assertTrue(execution.info.startswith('some_command'))
            exit_codes.remove(execution.exit_code)
            self.assertEqual(2, len(execution.logs))

    def test_get_executions_across_instances_empty(self):
        self.assertEqual([],
                         self._data_builder.get_executions_across_instances(
                             'does_not_exist',
                             'does_not_exist'))

    def test_get_executions_across_instances(self):
        self._add_tokens()
        executions = self._data_builder.get_executions_across_instances(
            'workflow_0', 'job_0')
        self.assertEqual(2 * 2, len(executions))
        exit_codes = [0, 0, 1, 1]
        for execution in executions:
            self.assertEqual('workflow_0', execution.workflow)
            self.assertEqual('job_0', execution.job)
            self.assertTrue(execution.info.startswith('some_command'))
            exit_codes.remove(execution.exit_code)
            self.assertEqual(2, len(execution.logs))

    def test_get_execution_empty(self):
        self.assertIsNone(self._data_builder.get_execution('does_not_exist',
                                                           'does_not_exist',
                                                           'does_not_exist',
                                                           0))

    def test_get_execution(self):
        self._add_tokens()
        execution = self._data_builder.get_execution('workflow_0',
                                                     'instance_0',
                                                     'job_0',
                                                     1)
        self.assertEqual('workflow_0', execution.workflow)
        self.assertEqual('instance_0', execution.instance)
        self.assertEqual('job_0', execution.job)
        self.assertEqual(1, execution.execution)
        self.assertEqual('some_command 1 some_args 1', execution.info)
        self.assertEqual(1, execution.exit_code)
        self.assertEqual(2, execution.start_time)
        self.assertEqual(13, execution.end_time)
        self.assertEqual(2, len(execution.logs))

    @mock.patch('__builtin__.open')
    def test_get_file_content_no_file(self, _):
        self.assertEqual('',
                         self._data_builder.get_file_content('does_not_exist',
                                                             'does_not_exist',
                                                             'does_not_exist',
                                                             'does_not_exist',
                                                             'does_not_exist'))

    @mock.patch('os.makedirs')
    @mock.patch('__builtin__.open')
    def test_get_file_content(self, open_mock, _):
        generate_workflows(2, 2, 2, 2, 2, self._store)

        file_mock = mock.MagicMock()
        open_mock.return_value = file_mock
        file_mock.__enter__.return_value = file_mock
        file_mock.read.return_value = 'some_content'

        content = self._data_builder.get_file_content('workflow_0',
                                                      'instance_0',
                                                      'job_0',
                                                      0,
                                                      'info')
        self.assertEqual('some_content', content)

    def test_get_token_paths_empty(self):
        self.assertRaises(PinballException,
                          self._data_builder.get_token_paths,
                          '')

    def test_get_token_paths(self):
        self._add_tokens()
        token_paths = self._data_builder.get_token_paths(
            '/workflow/workflow_0/instance_0/job/waiting/')
        self.assertEqual(2, len(token_paths))
        paths = ['/workflow/workflow_0/instance_0/job/waiting/job_0',
                 '/workflow/workflow_0/instance_0/job/waiting/job_1']
        for token_path in token_paths:
            self.assertEqual(1, token_path.count)
            paths.remove(token_path.path)
        self.assertEqual([], paths)

    def test_get_token_empty(self):
        self.assertRaises(PinballException,
                          self._data_builder.get_token,
                          '/does_not_exist')

    def test_get_token(self):
        self._add_tokens()
        token = self._data_builder.get_token(
            '/workflow/workflow_0/instance_0/job/waiting/job_0')
        self.assertEqual('/workflow/workflow_0/instance_0/job/waiting/job_0',
                         token.name)
        self.assertIsNone(token.owner)
        self.assertIsNone(token.expiration_time)
        self.assertEqual(0, token.priority)
        self.assertIsNotNone(token.data)

    def test_signal_not_set(self):
        self.assertFalse(self._data_builder.is_signal_set('does_not_exist', 0,
                                                          Signal.DRAIN))

    def test_signal_set(self):
        self._add_tokens()
        self.assertTrue(self._data_builder.is_signal_set('workflow_0', 0,
                                                         Signal.DRAIN))

    # The workflow status should reflect the most recently started running
    # instance.
    def test_workflow_data_from_instances_data1(self):
        wf_instance_list = [
            WorkflowInstanceData('wf', '22346', Status.ABORTED, 12346, 54321),
            WorkflowInstanceData('wf', '22345', Status.RUNNING, 12345, None),
            WorkflowInstanceData('wf', '22347', Status.SUCCESS, 12347, 12390),
            WorkflowInstanceData('wf', '22348', Status.RUNNING, 12348, None),
        ]
        wf_data = DataBuilder._workflow_data_from_instances_data(
            wf_instance_list)
        self.assertEqual(wf_data.workflow, 'wf')
        self.assertEqual(wf_data.status, Status.RUNNING)
        self.assertEqual(wf_data.last_instance, '22348')
        self.assertEqual(wf_data.last_start_time, 12348)
        self.assertEqual(wf_data.last_end_time, None)
        self.assertEqual(wf_data.running_instance_number, 2)

    # The workflow status should still reflect the running instance even if
    # another instance ended at sys.maxint.
    def test_workflow_data_from_instances_data2(self):
        wf_instance_list = [
            WorkflowInstanceData('wf', '22346', Status.ABORTED, 12355, sys.maxint),
            WorkflowInstanceData('wf', '22347', Status.SUCCESS, 12365, 12390),
            WorkflowInstanceData('wf', '22345', Status.RUNNING, 12345, None),
        ]
        wf_data = DataBuilder._workflow_data_from_instances_data(
            wf_instance_list)
        self.assertEqual(wf_data.workflow, 'wf')
        self.assertEqual(wf_data.status, Status.RUNNING)
        self.assertEqual(wf_data.last_instance, '22345')
        self.assertEqual(wf_data.last_start_time, 12345)
        self.assertEqual(wf_data.last_end_time, None)
        self.assertEqual(wf_data.running_instance_number, 1)

    # With no running instance, the workflow status should reflect the most
    # recently finished instance.
    def test_workflow_data_from_instances_data3(self):
        wf_instance_list = [
            WorkflowInstanceData('wf', '22346', Status.ABORTED, 12345, 12392),
            WorkflowInstanceData('wf', '22347', Status.SUCCESS, 12346, 12393),
            WorkflowInstanceData('wf', '22345', Status.FAILURE, 12347, 12391),
            ]
        wf_data = DataBuilder._workflow_data_from_instances_data(
            wf_instance_list)
        self.assertEqual(wf_data.workflow, 'wf')
        self.assertEqual(wf_data.status, Status.SUCCESS)
        self.assertEqual(wf_data.last_instance, '22347')
        self.assertEqual(wf_data.last_start_time, 12346)
        self.assertEqual(wf_data.last_end_time, 12393)
        self.assertEqual(wf_data.running_instance_number, 0)

    # The most recently finished instance should win even if another instance
    # ended at sys.maxint.
    def test_workflow_data_from_instances_data4(self):
        wf_instance_list = [
            WorkflowInstanceData('wf', '22346', Status.ABORTED, 12345, 12392),
            WorkflowInstanceData('wf', '22347', Status.SUCCESS, 12346, 12393),
            WorkflowInstanceData('wf', '22345', Status.FAILURE, 12391, sys.maxint),
            ]
        wf_data = DataBuilder._workflow_data_from_instances_data(
            wf_instance_list)
        self.assertEqual(wf_data.workflow, 'wf')
        self.assertEqual(wf_data.status, Status.SUCCESS)
        self.assertEqual(wf_data.last_instance, '22347')
        self.assertEqual(wf_data.last_start_time, 12346)
        self.assertEqual(wf_data.last_end_time, 12393)
        self.assertEqual(wf_data.running_instance_number, 0)
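
The same DataBuilder read path can be driven directly against a populated
store. A hedged sketch, assuming the test module's imports and its
generate_workflows fixture; the fixture writes job log files, which is why
the tests above (and this sketch) patch out the filesystem calls:

import mock

store = EphemeralStore()
with mock.patch('os.makedirs'), mock.patch('__builtin__.open'):
    generate_workflows(2, 2, 2, 2, 2, store)

data_builder = DataBuilder(store)
data_builder.use_cache = True  # only archived instances get cached

for workflow in data_builder.get_workflows():
    print workflow.workflow, workflow.status, workflow.last_instance

# Drill down from a workflow to a single execution record.
execution = data_builder.get_execution('workflow_0', 'instance_0', 'job_0', 1)
print execution.info, execution.exit_code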
Code example #19
0
File: analyzer_test.py Project: runt18/pinball
class AnalyzerTestCase(unittest.TestCase):
    _NUM_LEVELS = 3

    def setUp(self):
        self._store = EphemeralStore()

    def _add_active_workflow_tokens(self):
        """Add some active workflow tokens.

        The job dependencies form a complete binary tree turned upside down.
        I.e., each job has two parents.
        """
        self._store = EphemeralStore()
        version = 1
        for level in range(AnalyzerTestCase._NUM_LEVELS):
            jobs_at_level = 2 ** (AnalyzerTestCase._NUM_LEVELS - level - 1)
            for job_index in range(jobs_at_level):
                job_name = 'job_{0:d}_{1:d}'.format(level, job_index)
                event_name = Name(workflow='some_workflow',
                                  instance='123',
                                  job=job_name,
                                  event='some_event')
                if level == 0:
                    inputs = [Name.WORKFLOW_START_INPUT,
                              Name.WORKFLOW_START_INPUT + '_prime']
                    event_name.input = Name.WORKFLOW_START_INPUT
                else:
                    inputs = ['job_{0:d}_{1:d}'.format(level - 1, 2 * job_index),
                              'job_{0:d}_{1:d}'.format(level - 1, 2 * job_index + 1)]
                    event_name.input = 'job_{0:d}_{1:d}'.format(level - 1, 2 * job_index)
                if level == AnalyzerTestCase._NUM_LEVELS - 1:
                    outputs = []
                else:
                    outputs = ['job_{0:d}_{1:d}'.format(level + 1, job_index / 2)]
                job = ShellJob(name=job_name,
                               inputs=inputs,
                               outputs=outputs,
                               command='some_command')
                job.history.append(ExecutionRecord())
                name = Name(workflow='some_workflow', instance='123',
                            job_state=Name.WAITING_STATE, job=job_name)
                job_token = Token(version=version,
                                  name=name.get_job_token_name(),
                                  priority=10,
                                  data=pickle.dumps(job))
                version += 1
                event = Event('some_event')
                event_token = Token(version=version,
                                    name=event_name.get_event_token_name(),
                                    priority=10,
                                    data=pickle.dumps(event))
                self._store.commit_tokens([job_token, event_token])

    def _archive_tokens(self):
        tokens = self._store.read_active_tokens()
        self._store.archive_tokens(tokens)
        return tokens

    def _simulate(self):
        """Simulate execution of active jobs."""
        tokens = self._store.read_tokens()
        satisfied_deps = set()
        executed_jobs = []
        jobs = {}
        for token in tokens:
            event_name = Name.from_event_token_name(token.name)
            if event_name.event:
                satisfied_deps.add((event_name.input, event_name.job))
            else:
                job_name = Name.from_job_token_name(token.name)
                if job_name.job:
                    job = pickle.loads(token.data)
                    jobs[job.name] = job
        dep_counts = collections.defaultdict(int)
        while satisfied_deps:
            last_satisfied_deps = satisfied_deps
            satisfied_deps = set()
            for (_, job_name) in last_satisfied_deps:
                dep_counts[job_name] += 1
                if dep_counts[job_name] == 2:
                    executed_jobs.append(job_name)
                    job = jobs[job_name]
                    for output in job.outputs:
                        satisfied_deps.add((job_name, output))
        return executed_jobs

    def test_change_instance(self):
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer.change_instance('321')
        tokens = analyzer.get_tokens()
        self.assertLess(0, len(tokens))
        for token in tokens:
            name = Name.from_job_token_name(token.name)
            self.assertEqual('321', name.instance)

    def test_change_job_histories(self):
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer.clear_job_histories()
        tokens = analyzer.get_tokens()
        self.assertLess(0, len(tokens))
        for token in tokens:
            job = pickle.loads(token.data)
            self.assertEqual([], job.history)

    def test_poison_no_tokens(self):
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer.poison([])

    def test_poison_no_roots(self):
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer.poison([])
        tokens = analyzer.get_tokens()
        self._store.commit_tokens(updates=tokens)
        executed_jobs = self._simulate()
        self.assertEqual([], executed_jobs)

    def test_poison_all(self):
        """Poison all top level jobs."""
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')

        roots = []
        for job_index in range(0, 2 ** (AnalyzerTestCase._NUM_LEVELS - 1)):
            roots.append('job_0_{0:d}'.format(job_index))

        analyzer.poison(roots)
        tokens = analyzer.get_tokens()
        self._store.commit_tokens(updates=tokens)
        executed_jobs = self._simulate()
        # We expect that every job has run.
        expected_num_executed_jobs = 2 ** AnalyzerTestCase._NUM_LEVELS - 1
        self.assertEqual(expected_num_executed_jobs, len(executed_jobs))

    def test_poison_subset(self):
        """Poison every second top level job."""
        self._add_active_workflow_tokens()
        self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')

        roots = []
        for job_index in range(0, 2 ** (AnalyzerTestCase._NUM_LEVELS - 1), 2):
            roots.append('job_0_{0:d}'.format(job_index))

        analyzer.poison(roots)
        tokens = analyzer.get_tokens()
        self._store.commit_tokens(updates=tokens)
        executed_jobs = self._simulate()
        # We expect that every second job at the top level and every job at
        # a lower level was run.
        expected_num_executed_jobs = (
            2 ** (AnalyzerTestCase._NUM_LEVELS - 1) - 1 +
            2 ** (AnalyzerTestCase._NUM_LEVELS - 1) / 2)
        self.assertEqual(expected_num_executed_jobs, len(executed_jobs))

    def test_poison_get_new_event_tokens(self):
        """Poison all top level jobs and get new event tokens."""
        self._add_active_workflow_tokens()
        tokens = self._archive_tokens()
        analyzer = Analyzer.from_store(self._store, 'some_workflow', '123')
        analyzer._filter_event_tokens(tokens)

        roots = []
        for job_index in range(0, 2 ** (AnalyzerTestCase._NUM_LEVELS - 1)):
            roots.append('job_0_{0:d}'.format(job_index))

        analyzer.poison(roots)
        tokens = analyzer.get_new_event_tokens()
        expected_num_new_event_tokens = 2 ** AnalyzerTestCase._NUM_LEVELS
        self.assertEqual(expected_num_new_event_tokens, len(tokens))
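
For _NUM_LEVELS = 3, the "upside down" dependency tree built by
_add_active_workflow_tokens looks as follows; each level-1 and level-2 job
waits on two parent jobs, while the level-0 roots wait on the two
workflow-start inputs:

    job_0_0   job_0_1   job_0_2   job_0_3    <- level 0 (roots)
         \     /             \     /
         job_1_0             job_1_1         <- level 1
               \             /
                  job_2_0                    <- level 2 (no outputs)

Poisoning all four roots therefore re-executes 2 ** 3 - 1 = 7 jobs, which is
exactly the expected_num_executed_jobs computed in test_poison_all.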
Code example #23
0
 def test_modify(self):
     handler = MasterHandler(EphemeralStore())
     self._insert_token(handler)
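
The _insert_token helper is elided from this snippet. A hypothetical
reconstruction, consistent with the modify tests elsewhere in this collection
and assuming MasterHandler exposes a modify() endpoint that wraps
ModifyTransaction (an assumption, not the project's verbatim helper):

 def _insert_token(self, handler):
     # Hypothetical helper: commit one token via the handler and return the
     # stored copy, which carries the version assigned at commit time.
     request = ModifyRequest(updates=[Token(name='/some_dir/some_token',
                                            data='some_data')])
     response = handler.modify(request)
     return response.updates[0]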
Code example #24
0
File: signaller_test.py Project: zhengge2017/pinball
 def setUp(self):
     self._factory = Factory()
     self._factory.create_master(EphemeralStore())
Code example #25
0
 def test_archive(self):
     handler = MasterHandler(EphemeralStore())
     token = self._insert_token(handler)
     request = ArchiveRequest()
     request.tokens = [token]
     handler.archive(request)
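
Archiving, as exercised here and in the ArchiveTransaction tests above,
removes tokens from the active set without deleting their history. A minimal
store-level sketch, assuming the test module's imports:

store = EphemeralStore()
token = Token(name='/some_dir/some_token', data='some_data')
store.commit_tokens(updates=[token])
store.archive_tokens([token])

# Archived tokens leave the active view but remain in the full history.
assert len(store.read_active_tokens()) == 0
assert len(store.read_tokens()) == 1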