def test_group(self): request = GroupRequest() request.namePrefix = '/' handler = MasterHandler(EphemeralStore()) response = handler.group(request) self.assertEqual(1, len(response.counts)) self.assertEqual(1, response.counts.values()[0])
def setUp(self):
    """Set up self._trie with 111 tokens, one of them a blessed version.

    Ten parent tokens each get ten children (10 + 100 tokens), plus the
    blessed version token itself.  Every token gets a distinct version
    from blessed_version.advance_version().
    """
    self._trie = pytrie.StringTrie()
    self._store = EphemeralStore()
    blessed_version = BlessedVersion(MasterHandler._BLESSED_VERSION,
                                     MasterHandler._MASTER_OWNER)
    for outer in range(0, 10):
        parent = Token(blessed_version.advance_version(),
                       '/some_dir/some_token_%d' % outer,
                       priority=outer,
                       data='some_data_%d' % outer)
        self._trie[parent.name] = parent
        self._store.commit_tokens(updates=[parent])
        for inner in range(0, 10):
            child = Token(
                blessed_version.advance_version(),
                '/some_dir/some_token_%d/some_other_token_%d' % (outer, inner),
                priority=inner,
                data='some_data_%d_%d' % (outer, inner))
            self._trie[child.name] = child
            self._store.commit_tokens(updates=[child])
    # One final bump before the blessed version token itself is stored.
    blessed_version.advance_version()
    self._trie[MasterHandler._BLESSED_VERSION] = blessed_version
    self._store.commit_tokens(updates=[blessed_version])
    self._check_version_uniqueness()
def setUp(self):
    """Wire a scheduler to a fresh master and post an initial schedule token."""
    self._factory = Factory()
    store = EphemeralStore()
    self._factory.create_master(store)
    mailer = Emailer('some_host', '8080')
    self._scheduler = Scheduler(self._factory.get_client(), store, mailer)
    self._client = self._factory.get_client()
    self._post_schedule_token()
def setUp(self):
    """Build a worker backed by a fresh master, a shared store, and a mock emailer."""
    factory = Factory()
    store = EphemeralStore()
    factory.create_master(store)
    emailer = mock.Mock()
    worker = Worker(factory.get_client(), store, emailer)
    self._factory = factory
    self._store = store
    self._emailer = emailer
    self._worker = worker
    self._client = factory.get_client()
def test_query(self):
    """An empty-prefix query against a fresh master returns exactly one token."""
    master = MasterHandler(EphemeralStore())
    the_query = Query()
    the_query.namePrefix = ''
    the_query.maxTokens = 10
    query_request = QueryRequest()
    query_request.queries = [the_query]
    query_response = master.query(query_request)
    self.assertEqual(1, len(query_response.tokens))
def test_query_and_own(self):
    """Query-and-own on a fresh master finds no ownable tokens."""
    master = MasterHandler(EphemeralStore())
    the_query = Query()
    the_query.namePrefix = ''
    the_query.maxTokens = 10
    own_request = QueryAndOwnRequest()
    own_request.owner = 'some_owner'
    own_request.expirationTime = sys.maxint
    own_request.query = the_query
    own_response = master.query_and_own(own_request)
    self.assertEqual(0, len(own_response.tokens))
def _add_active_workflow_tokens(self):
    """Add some active workflow tokens.

    The job dependencies form a complete binary tree turned upside down.
    I.e., each job has two parents.
    """
    self._store = EphemeralStore()
    version = 1
    # Level 0 is the widest layer; each successive level halves the job count.
    for level in range(AnalyzerTestCase._NUM_LEVELS):
        jobs_at_level = 2**(AnalyzerTestCase._NUM_LEVELS - level - 1)
        for job_index in range(jobs_at_level):
            job_name = 'job_%d_%d' % (level, job_index)
            event_name = Name(workflow='some_workflow',
                              instance='123',
                              job=job_name,
                              event='some_event')
            if level == 0:
                # Leaf-level jobs depend on the workflow start input (plus a
                # second synthetic input); the event satisfies only the first.
                inputs = [Name.WORKFLOW_START_INPUT,
                          Name.WORKFLOW_START_INPUT + '_prime']
                event_name.input = Name.WORKFLOW_START_INPUT
            else:
                # Each job has two parents at the previous level; the event
                # satisfies only the first parent's input.
                inputs = ['job_%d_%d' % (level - 1, 2 * job_index),
                          'job_%d_%d' % (level - 1, 2 * job_index + 1)]
                event_name.input = 'job_%d_%d' % (level - 1, 2 * job_index)
            if level == AnalyzerTestCase._NUM_LEVELS - 1:
                # The root of the (upside-down) tree feeds nothing downstream.
                outputs = []
            else:
                # NOTE(review): relies on Python 2 integer '/' division;
                # under Python 3 this would produce a float in the job name.
                outputs = ['job_%d_%d' % (level + 1, job_index / 2)]
            job = ShellJob(name=job_name,
                           inputs=inputs,
                           outputs=outputs,
                           command='some_command')
            job.history.append(ExecutionRecord())
            name = Name(workflow='some_workflow',
                        instance='123',
                        job_state=Name.WAITING_STATE,
                        job=job_name)
            job_token = Token(version=version,
                              name=name.get_job_token_name(),
                              priority=10,
                              data=pickle.dumps(job))
            # NOTE(review): version is bumped only once per job/event pair, so
            # each event token shares its version with the next iteration's
            # job token — confirm whether distinct versions were intended.
            version += 1
            event = Event('some_event')
            event_token = Token(version=version,
                                name=event_name.get_event_token_name(),
                                priority=10,
                                data=pickle.dumps(event))
            self._store.commit_tokens([job_token, event_token])
def test_run(self, load_path_mock):
    """Running a schedule loads the workflow parser and posts a single update."""
    parser = mock.Mock()
    load_path_mock.return_value = parser
    token_name = Name(workflow='some_workflow',
                      instance='123',
                      job_state=Name.WAITING_STATE,
                      job='some_job')
    parser.get_workflow_tokens.return_value = [
        Token(name=token_name.get_job_token_name())]
    schedule = WorkflowSchedule(workflow='some_workflow')
    request = schedule.run(Emailer('some_host', '8080'), EphemeralStore())
    # The parser must be loaded from the configured path with no params.
    self.assertEqual(
        [mock.call('pinball_ext.workflow.parser.PyWorkflowParser', {},
                   'schedule')],
        load_path_mock.call_args_list)
    self.assertEqual(1, len(request.updates))
def test_run(self, load_path_mock):
    """Running a schedule invokes the loaded parser factory with empty params."""
    parser = mock.Mock()

    def fake_loader(params):
        # The schedule should pass no parser parameters at all.
        self.assertEqual([], params.keys())
        return parser

    load_path_mock.return_value = fake_loader
    token_name = Name(workflow='some_workflow',
                      instance='123',
                      job_state=Name.WAITING_STATE,
                      job='some_job')
    parser.get_workflow_tokens.return_value = [
        Token(name=token_name.get_job_token_name())]
    schedule = WorkflowSchedule(workflow='some_workflow')
    request = schedule.run(Emailer('some_host', '8080'), EphemeralStore())
    self.assertEqual(1, len(request.updates))
def setUp(self):
    """Create a fresh ephemeral store and a data builder backed by it."""
    store = EphemeralStore()
    self._store = store
    self._data_builder = DataBuilder(store)
def test_modify(self):
    """Inserting a token through a fresh master exercises the modify path."""
    master = MasterHandler(EphemeralStore())
    self._insert_token(master)
def test_archive(self):
    """A freshly inserted token can be archived without error."""
    master = MasterHandler(EphemeralStore())
    archive_request = ArchiveRequest()
    archive_request.tokens = [self._insert_token(master)]
    master.archive(archive_request)
def setUp(self):
    """Give each test its own in-memory token store."""
    self._store = EphemeralStore()
def setUp(self):
    """Spin up a factory whose master is backed by an ephemeral store."""
    factory = Factory()
    factory.create_master(EphemeralStore())
    self._factory = factory