def test_get_translated_data__None_msgid(self):
    """Verify that get_translated_data() maps a None msgid to None."""
    job = JobDefinition(self._full_record.data)
    with mock.patch.object(job, "_provider"):
        translated = job.get_translated_data(None)
    self.assertEqual(translated, None)
def test_get_translated_data__no_provider(self):
    """Verify that get_translated_data() passes msgid through when the job has no provider."""
    job = JobDefinition(self._full_record.data)
    job._provider = None
    translated = job.get_translated_data('foo')
    self.assertEqual(translated, 'foo')
def test_via_does_not_change_checksum(self):
    """The 'via' attribute must not influence the job checksum."""
    parent = JobDefinition({'name': 'parent', 'plugin': 'local'})
    record = RFC822Record({'name': 'test', 'plugin': 'shell'}, None)
    child = parent.create_child_job_from_record(record)
    # 'helper' has a definition identical to 'child' but no tie to 'parent'
    helper = JobDefinition({'name': 'test', 'plugin': 'shell'})
    self.assertEqual(child.via, parent.get_checksum())
    self.assertEqual(child.get_checksum(), helper.get_checksum())
def test_dependency_parsing_empty(self):
    """A job without a 'depends' field has no direct dependencies."""
    job = JobDefinition({'id': 'id', 'plugin': 'plugin'})
    self.assertEqual(set(), job.get_direct_dependencies())
def test_via_does_not_change_checksum(self): """ verify that the 'via' attribute in no way influences job checksum """ # Create a 'parent' job parent = JobDefinition({'id': 'parent', 'plugin': 'local'}) # Create a 'child' job, using create_child_job_from_record() should # time the two so that child.via should be parent.checksum. # # The elaborate record that gets passed has all the meta-data that # traces back to the 'parent' job (as well as some imaginary line_start # and line_end values for the purpose of the test). child = parent.create_child_job_from_record( RFC822Record(data={ 'id': 'test', 'plugin': 'shell' }, origin=Origin(source=JobOutputTextSource(parent), line_start=1, line_end=1))) # Now 'child.via' should be the same as 'parent.checksum' self.assertEqual(child.via, parent.checksum) # Create an unrelated job 'helper' with the definition identical as # 'child' but without any ties to the 'parent' job helper = JobDefinition({'id': 'test', 'plugin': 'shell'}) # And again, child.checksum should be the same as helper.checksum self.assertEqual(child.checksum, helper.checksum)
def test_environ_parsing_empty(self):
    """A job without an 'environ' field has no environment settings."""
    job = JobDefinition({'name': 'name', 'plugin': 'plugin'})
    self.assertEqual(set(), job.get_environ_settings())
def test_get_inhibitor_list_FAILED_DEP(self):
    """
    A job whose 'depends' target ran without OUTCOME_PASS gets the
    FAILED_DEP inhibitor (a failed 'after' target adds nothing).
    """
    j1 = JobDefinition({'id': 'j1', 'depends': 'j2', 'after': 'j3'})
    j2 = JobDefinition({'id': 'j2'})
    j3 = JobDefinition({'id': 'j3'})
    session_state = mock.MagicMock(spec=SessionState)
    session_state.job_state_map = {
        'j1': mock.Mock(spec_set=JobState),
        'j2': mock.Mock(spec_set=JobState),
        'j3': mock.Mock(spec_set=JobState),
    }
    # Both j2 and j3 ran and failed
    for name, job in (('j2', j2), ('j3', j3)):
        job_state = session_state.job_state_map[name]
        job_state.job = job
        job_state.result.outcome = IJobResult.OUTCOME_FAIL
    expected = [JobReadinessInhibitor(InhibitionCause.FAILED_DEP, j2, None)]
    self.assertEqual(
        self.ctrl.get_inhibitor_list(session_state, j1), expected)
def test_get_inhibitor_list_PENDING_DEP(self):
    """
    A job whose 'depends' or 'after' targets have not run yet gets one
    PENDING_DEP inhibitor per target.
    """
    j1 = JobDefinition({'id': 'j1', 'depends': 'j2', 'after': 'j3'})
    j2 = JobDefinition({'id': 'j2'})
    j3 = JobDefinition({'id': 'j3'})
    session_state = mock.MagicMock(spec=SessionState)
    session_state.job_state_map = {
        'j1': mock.Mock(spec_set=JobState),
        'j2': mock.Mock(spec_set=JobState),
        'j3': mock.Mock(spec_set=JobState),
    }
    # Neither j2 nor j3 has been invoked yet
    for name, job in (('j2', j2), ('j3', j3)):
        job_state = session_state.job_state_map[name]
        job_state.job = job
        job_state.result.outcome = IJobResult.OUTCOME_NONE
    expected = [
        JobReadinessInhibitor(InhibitionCause.PENDING_DEP, j2, None),
        JobReadinessInhibitor(InhibitionCause.PENDING_DEP, j3, None),
    ]
    self.assertEqual(
        self.ctrl.get_inhibitor_list(session_state, j1), expected)
def test_resource_parsing_empty(self):
    """A job without a 'requires' field has no resource dependencies."""
    job = JobDefinition({'name': 'name', 'plugin': 'plugin'})
    self.assertEqual(set(), job.get_resource_dependencies())
def test_via_does_not_change_checksum(self): """ verify that the 'via' attribute in no way influences job checksum """ # Create a 'parent' job parent = JobDefinition({'name': 'parent', 'plugin': 'local'}) # Create a 'child' job, using create_child_job_from_record() should # time the two so that child.via should be parent.checksum. # # The elaborate record that gets passed has all the meta-data that # traces back to the 'parent' job (as well as some imaginary line_start # and line_end values for the purpose of the test). child = parent.create_child_job_from_record( RFC822Record( data={'name': 'test', 'plugin': 'shell'}, origin=Origin( source=JobOutputTextSource(parent), line_start=1, line_end=1))) # Now 'child.via' should be the same as 'parent.checksum' self.assertEqual(child.via, parent.checksum) # Create an unrelated job 'helper' with the definition identical as # 'child' but without any ties to the 'parent' job helper = JobDefinition({'name': 'test', 'plugin': 'shell'}) # And again, child.checksum should be the same as helper.checksum self.assertEqual(child.checksum, helper.checksum)
def test_tr_summary__falls_back_to_id(self):
    """tr_summary() falls back to job.id when no summary is defined."""
    job = JobDefinition({'id': 'id'})
    self.assertEqual('id', job.tr_summary())
def make_realistic_test_session(self, session_dir):
    """
    Build and return a SessionState with two completed jobs.

    job_a is a shell job whose 'requires' expression depends on the
    resource produced by job_b; both get passing MemoryJobResults
    recorded via update_job_result().

    :param session_dir:
        Not used by this factory itself — presumably kept for signature
        symmetry with other helpers; TODO(review) confirm callers need it.
    """
    # Create a more realistic session with two jobs but with richer set
    # of data in the actual jobs and results.
    job_a = JobDefinition({
        'plugin': 'shell',
        'name': 'job_a',
        'summary': 'This is job A',
        'command': 'echo testing && true',
        'requires': 'job_b.ready == "yes"'
    })
    job_b = JobDefinition({
        'plugin': 'resource',
        'name': 'job_b',
        'summary': 'This is job B',
        'command': 'echo ready: yes'
    })
    session = SessionState([job_a, job_b])
    session.update_desired_job_list([job_a, job_b])
    # Results mirror what running the commands above would produce
    result_a = MemoryJobResult({
        'outcome': IJobResult.OUTCOME_PASS,
        'return_code': 0,
        'io_log': [(0, 'stdout', b'testing\n')],
    })
    result_b = MemoryJobResult({
        'outcome': IJobResult.OUTCOME_PASS,
        'return_code': 0,
        'comments': 'foo',
        'io_log': [(0, 'stdout', b'ready: yes\n')],
    })
    session.update_job_result(job_a, result_a)
    session.update_job_result(job_b, result_b)
    return session
def test_get_vote(self):
    """verify that NonLocalJobQualifier.get_vote() works as expected"""
    # Inclusive qualifiers vote INCLUDE on non-local jobs, exclusive
    # ones vote EXCLUDE; local jobs are always IGNOREd.
    cases = [
        (NonLocalJobQualifier(self.origin),
         JobDefinition({'name': 'foo', 'plugin': 'shell'}),
         IJobQualifier.VOTE_INCLUDE),
        (NonLocalJobQualifier(self.origin, inclusive=False),
         JobDefinition({'name': 'foo', 'plugin': 'shell'}),
         IJobQualifier.VOTE_EXCLUDE),
        (NonLocalJobQualifier(self.origin),
         JobDefinition({'name': 'bar', 'plugin': 'local'}),
         IJobQualifier.VOTE_IGNORE),
        (NonLocalJobQualifier(self.origin, inclusive=False),
         JobDefinition({'name': 'bar', 'plugin': 'local'}),
         IJobQualifier.VOTE_IGNORE),
    ]
    for qualifier, job, expected_vote in cases:
        self.assertEqual(qualifier.get_vote(job), expected_vote)
def test_estimated_duration(self):
    """estimated_duration parses to a float; missing or invalid values yield None."""
    self.assertEqual(JobDefinition({}).estimated_duration, None)
    self.assertEqual(
        JobDefinition({'estimated_duration': 'foo'}).estimated_duration,
        None)
    self.assertEqual(
        JobDefinition({'estimated_duration': '123.5'}).estimated_duration,
        123.5)
def test_environ_parsing_single_word(self):
    """A one-word 'environ' field parses to a one-element set."""
    job = JobDefinition(
        {'name': 'name', 'plugin': 'plugin', 'environ': 'word'})
    self.assertEqual({'word'}, job.get_environ_settings())
def test_dependency_parsing_single_word(self):
    """A one-word 'depends' field parses to a one-element set."""
    job = JobDefinition(
        {'name': 'name', 'plugin': 'plugin', 'depends': 'word'})
    self.assertEqual({'word'}, job.get_direct_dependencies())
def test_resource_parsing_typical(self):
    """A typical 'requires' expression yields the referenced resource name."""
    job = JobDefinition(
        {'name': 'name', 'plugin': 'plugin', 'requires': 'foo.bar == 10'})
    self.assertEqual({'foo'}, job.get_resource_dependencies())
def test_depedency_parsing_newlines(self):
    """Newline-separated 'depends' entries are all parsed."""
    job = JobDefinition(
        {'name': 'name', 'plugin': 'plugin', 'depends': 'foo\nbar\nfroz'})
    self.assertEqual({'foo', 'bar', 'froz'}, job.get_direct_dependencies())
def test_depedency_parsing_multiple_commas(self):
    """Consecutive commas in 'depends' do not produce empty entries."""
    job = JobDefinition(
        {'name': 'name', 'plugin': 'plugin', 'depends': 'foo,,,,bar'})
    self.assertEqual({'foo', 'bar'}, job.get_direct_dependencies())
def test_get_translated_data__typical(self):
    """Verify that get_translated_data() delegates the msgid to the provider."""
    job = JobDefinition(self._full_record.data)
    with mock.patch.object(job, "_provider") as mock_provider:
        retval = job.get_translated_data('foo')
        # The provider received the msgid verbatim
        mock_provider.get_translated_data.assert_called_with("foo")
        # And its translation was returned unchanged
        self.assertEqual(retval, mock_provider.get_translated_data())
def test_environ_parsing_single_word(self):
    """A one-word 'environ' field parses to a one-element set."""
    job = JobDefinition(
        {'id': 'id', 'plugin': 'plugin', 'environ': 'word'})
    self.assertEqual({'word'}, job.get_environ_settings())
def setUp(self):
    """Create a job with a mocked provider plus fake session paths."""
    self.job = JobDefinition({'name': 'name', 'environ': 'foo bar froz'})
    provider = Mock()
    provider.extra_PYTHONPATH = None
    provider.extra_PATH = "value-of-extra-path"
    provider.CHECKBOX_SHARE = "checkbox-share-value"
    self.job._provider = provider
    self.session_dir = "session-dir-value"
    self.checkbox_data_dir = os.path.join(
        self.session_dir, "CHECKBOX_DATA")
def test_dependency_parsing_with_various_separators(self):
    """'depends' parses identically for every separator in the scenario."""
    separator = self.parameters_keymap[self.parameters.glue]
    job = JobDefinition({
        'name': 'name',
        'plugin': 'plugin',
        'depends': separator.join(['foo', 'bar', 'froz']),
    })
    self.assertEqual({'foo', 'bar', 'froz'}, job.get_direct_dependencies())
def test_environ_parsing_with_various_separators(self):
    """'environ' parses identically for every separator in the scenario."""
    separator = self.parameters_keymap[self.parameters.glue]
    job = JobDefinition({
        'id': 'id',
        'plugin': 'plugin',
        'environ': separator.join(['foo', 'bar', 'froz']),
    })
    self.assertEqual({'foo', 'bar', 'froz'}, job.get_environ_settings())
def test_dependency_parsing_single_word(self):
    """A one-word 'depends' field parses to a one-element set."""
    job = JobDefinition(
        {'id': 'id', 'plugin': 'plugin', 'depends': 'word'})
    self.assertEqual({'word'}, job.get_direct_dependencies())
def test_summary(self):
    """summary prefers the 'summary' field, falls back to 'name', else None."""
    self.assertEqual(JobDefinition({}).summary, None)
    self.assertEqual(JobDefinition({'name': 'name'}).summary, 'name')
    self.assertEqual(JobDefinition({'summary': 'summary'}).summary, 'summary')
    # 'summary' wins when both are present
    both = JobDefinition({'summary': 'summary', 'name': 'name'})
    self.assertEqual(both.summary, 'summary')
def test_resource_parsing_typical(self):
    """A typical 'requires' expression yields the referenced resource name."""
    job = JobDefinition(
        {'id': 'id', 'plugin': 'plugin', 'requires': 'foo.bar == 10'})
    self.assertEqual({'foo'}, job.get_resource_dependencies())
def test_repr_SessionState_typical_session(self):
    """
    verify the representation of a SessionState with some unused jobs

    Unused jobs should just have no representation. Their checksum
    should not be mentioned. Their results (empty results) should be
    ignored.
    """
    used_job = JobDefinition({
        "plugin": "shell",
        "id": "used",
        "command": "echo 'hello world'",
    })
    # 'unused' is part of the session but never selected or executed
    unused_job = JobDefinition({
        "plugin": "shell",
        "id": "unused",
        "command": "echo 'hello world'",
    })
    used_result = MemoryJobResult({
        "io_log": [
            (0.0, "stdout", b'hello world\n'),
        ],
        'outcome': IJobResult.OUTCOME_PASS
    })
    session_state = SessionState([used_job, unused_job])
    session_state.update_desired_job_list([used_job])
    session_state.update_job_result(used_job, used_result)
    data = self.helper._repr_SessionState(session_state, self.session_dir)
    # Only 'used' appears: its (stable) checksum, its result with the
    # io_log payload base64-encoded, and the desired job list.
    self.assertEqual(
        data, {
            'jobs': {
                'used': ('8c393c19fdfde1b6afc5b79d0a1617ecf7531cd832a16450dc'
                         '2f3f50d329d373')
            },
            'results': {
                'used': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [[0.0, 'stdout', 'aGVsbG8gd29ybGQK']],
                    'outcome': 'pass',
                    'return_code': None
                }]
            },
            'desired_job_list': ['used'],
            'mandatory_job_list': [],
            'metadata': {
                'title': None,
                'flags': [],
                'running_job_name': None,
                'app_blob': '',
                'app_id': None,
                'custom_joblist': False,
                'rejected_jobs': []
            },
        })
def test_get_inhibitor_list_good_resource(self):
    """
    A job whose resource expression evaluates to True against an
    already-produced resource has no inhibitors.
    """
    j1 = JobDefinition({'id': 'j1', 'requires': 'j2.attr == "ok"'})
    j2 = JobDefinition({'id': 'j2'})
    session_state = mock.MagicMock(spec=SessionState)
    session_state.resource_map = {'j2': [Resource({'attr': 'ok'})]}
    session_state.job_state_map['j2'].job = j2
    inhibitors = self.ctrl.get_inhibitor_list(session_state, j1)
    self.assertEqual(inhibitors, [])
def test_resource_parsing_many(self):
    """Multiple 'requires' lines each contribute a resource name."""
    job = JobDefinition({
        'id': 'id',
        'plugin': 'plugin',
        'requires': ("foo.bar == 10\n"
                     "froz.bot == 10\n"),
    })
    self.assertEqual({'foo', 'froz'}, job.get_resource_dependencies())
def test_get_inhibitor_list_good_dep(self):
    """
    A job whose 'depends' target ran with OUTCOME_PASS has no inhibitors.
    """
    j1 = JobDefinition({'id': 'j1', 'depends': 'j2'})
    j2 = JobDefinition({'id': 'j2'})
    session_state = mock.MagicMock(spec=SessionState)
    dep_state = session_state.job_state_map['j2']
    dep_state.job = j2
    dep_state.result.outcome = IJobResult.OUTCOME_PASS
    inhibitors = self.ctrl.get_inhibitor_list(session_state, j1)
    self.assertEqual(inhibitors, [])
def test_resource_parsing_many(self):
    """Multiple 'requires' lines each contribute a resource name."""
    job = JobDefinition({
        'name': 'name',
        'plugin': 'plugin',
        'requires': ("foo.bar == 10\n"
                     "froz.bot == 10\n"),
    })
    self.assertEqual({'foo', 'froz'}, job.get_resource_dependencies())
def test_tr_description(self):
    """Verify that tr_description() delegates to get_normalized_translated_data()."""
    job = JobDefinition(self._full_record.data)
    with mock.patch.object(
            job, "get_normalized_translated_data") as mgntd:
        retval = job.tr_description()
        # The description was passed through for normalized translation
        mgntd.assert_called_once_with(job.description)
        # The translated value came back unchanged
        self.assertEqual(retval, mgntd())
def test_dependency_parsing_with_various_separators(self):
    """'depends' parses identically for every separator in the scenario."""
    separator = self.parameters_keymap[self.parameters.glue]
    job = JobDefinition({
        "name": "name",
        "plugin": "plugin",
        "depends": separator.join(["foo", "bar", "froz"]),
    })
    self.assertEqual({"foo", "bar", "froz"}, job.get_direct_dependencies())
def test_checksum_smoke(self):
    """Checksums are equal for identical jobs, distinct otherwise, and stable."""
    job1 = JobDefinition({'id': 'id', 'plugin': 'plugin'})
    identical_to_job1 = JobDefinition({'id': 'id', 'plugin': 'plugin'})
    # Two distinct but identical jobs share a checksum
    self.assertEqual(job1.checksum, identical_to_job1.checksum)
    # A different definition produces a different checksum
    job2 = JobDefinition({'id': 'other id', 'plugin': 'plugin'})
    self.assertNotEqual(job1.checksum, job2.checksum)
    # The checksum is stable and does not change over time
    expected = ("cd21b33e6a2f4d1291977b60d922bbd276"
                "775adce73fca8c69b4821c96d7314a")
    self.assertEqual(job1.checksum, expected)
def test_get_inhibitor_list_PENDING_DEP(self):
    """
    A job whose 'depends' target has not been invoked yet gets the
    PENDING_DEP inhibitor.
    """
    j1 = JobDefinition({'id': 'j1', 'depends': 'j2'})
    j2 = JobDefinition({'id': 'j2'})
    session_state = mock.MagicMock(spec=SessionState)
    dep_state = session_state.job_state_map['j2']
    dep_state.job = j2
    dep_state.result.outcome = IJobResult.OUTCOME_NONE
    expected = [
        JobReadinessInhibitor(JobReadinessInhibitor.PENDING_DEP, j2, None)
    ]
    self.assertEqual(
        self.ctrl.get_inhibitor_list(session_state, j1), expected)
def test_user_env_without_environ_keys(self):
    """Variables absent from 'environ' still reach a non-root job's env."""
    with patch.dict('os.environ', {'foo': 'bar'}):
        job = JobDefinition({'name': 'name', 'plugin': 'plugin'})
        provider = Mock()
        provider.extra_PYTHONPATH = None
        provider.extra_PATH = ""
        job._provider = provider
        env = JobRunner._get_script_env(Mock(), job, only_changes=False)
        self.assertIn("foo", env)
def test_get_inhibitor_list_PENDING_RESOURCE(self):
    """
    A job requiring a resource that has not been produced yet gets the
    PENDING_RESOURCE inhibitor.
    """
    j1 = JobDefinition({'id': 'j1', 'requires': 'j2.attr == "ok"'})
    j2 = JobDefinition({'id': 'j2'})
    session_state = mock.MagicMock(spec=SessionState)
    session_state.job_state_map['j2'].job = j2
    # No resources at all have been produced
    session_state.resource_map = {}
    expected = [
        JobReadinessInhibitor(
            JobReadinessInhibitor.PENDING_RESOURCE, j2,
            ResourceExpression('j2.attr == "ok"'))
    ]
    self.assertEqual(
        self.ctrl.get_inhibitor_list(session_state, j1), expected)
def test_root_env_without_environ_keys(self):
    """Variables absent from 'environ' do not leak into a root job's env diff."""
    with patch.dict('os.environ', {'foo': 'bar'}):
        job = JobDefinition({
            'name': 'name',
            'plugin': 'plugin',
            'user': '******',
        })
        checkbox = Mock()
        checkbox.extra_PYTHONPATH = None
        checkbox.extra_PATH = ""
        job._checkbox = checkbox
        env = JobRunner._get_script_env(Mock(), job, only_changes=True)
        self.assertNotIn("foo", env)
def test_environ_parsing_with_various_separators(self):
    """'environ' parses identically for every separator in the scenario."""
    separator = self.parameters_keymap[self.parameters.glue]
    job = JobDefinition({
        'name': 'name',
        'plugin': 'plugin',
        'environ': separator.join(['foo', 'bar', 'froz']),
    })
    self.assertEqual({'foo', 'bar', 'froz'}, job.get_environ_settings())
def test_get_inhibitor_list_FAILED_RESOURCE(self):
    """
    A job whose resource expression evaluates to False against a
    produced resource gets the FAILED_RESOURCE inhibitor.
    """
    j1 = JobDefinition({'id': 'j1', 'requires': 'j2.attr == "ok"'})
    j2 = JobDefinition({'id': 'j2'})
    session_state = mock.MagicMock(spec=SessionState)
    session_state.job_state_map['j2'].job = j2
    # The resource exists but does not satisfy the expression
    session_state.resource_map = {'j2': [Resource({'attr': 'not-ok'})]}
    expected = [
        JobReadinessInhibitor(
            JobReadinessInhibitor.FAILED_RESOURCE, j2,
            ResourceExpression('j2.attr == "ok"'))
    ]
    self.assertEqual(
        self.ctrl.get_inhibitor_list(session_state, j1), expected)
def test_get_normalized_translated_data__typical(self):
    """verify the runtime behavior of get_normalized_translated_data()"""
    job = JobDefinition(self._full_record.data)
    with mock.patch.object(job, "get_translated_data") as mock_tr:
        retval = job.get_normalized_translated_data('foo')
        # The msgid was forwarded to get_translated_data()
        mock_tr.assert_called_with("foo")
        # The translation was normalized via normalize_rfc822_value()
        mock_norm.assert_called_with(mock_tr())
        # And the normalized value was returned
        self.assertEqual(retval, mock_norm())
def test_dependency_parsing_with_various_separators(self):
    """'depends' parses identically for every separator in the scenario."""
    separator = self.parameters_keymap[self.parameters.glue]
    job = JobDefinition({
        'id': 'id',
        'plugin': 'plugin',
        'depends': separator.join(['foo', 'bar', 'froz']),
    })
    self.assertEqual({'foo', 'bar', 'froz'}, job.get_direct_dependencies())
def test_select_jobs__inclusion(self):
    """verify that select_jobs() honors qualifier ordering"""
    job_a = JobDefinition({'id': 'a'})
    job_b = JobDefinition({'id': 'b'})
    job_c = JobDefinition({'id': 'c'})
    qualifiers = [
        JobIdQualifier("a", self.origin),
        JobIdQualifier("c", self.origin),
    ]
    # The selection must depend only on the qualifier list, never on
    # how the candidate job list happens to be ordered.
    for candidates in permutations([job_a, job_b, job_c], 3):
        self.assertEqual(
            select_jobs(candidates, qualifiers), [job_a, job_c])
def test_category_map(self):
    """
    Ensure that passing OPTION_WITH_CATEGORY_MAP causes a category id
    -> tr_name mapping to show up.
    """
    exporter = self.TestSessionStateExporter([
        SessionStateExporterBase.OPTION_WITH_CATEGORY_MAP
    ])
    # Create three units, two categories (foo, bar) and one job (froz)
    # so that froz.category_id == foo
    cat_foo = CategoryUnit({
        'id': 'foo',
        'name': 'The foo category',
    })
    cat_bar = CategoryUnit({
        'id': 'bar',
        'name': 'The bar category',
    })
    job_froz = JobDefinition({
        'plugin': 'shell',
        'id': 'froz',
        'category_id': 'foo'
    })
    # Create and export a session with the three units
    state = SessionState([cat_foo, cat_bar, job_froz])
    session_manager = mock.Mock(spec_set=SessionManager, state=state)
    data = exporter.get_session_data_subset(session_manager)
    # Ensure that only the foo category was used, and the bar category was
    # discarded as nothing was referencing it
    self.assertEqual(data['category_map'], {
        'foo': 'The foo category',
    })
def __init__(self, filename, text, provider):
    """
    Initialize the plug-in by parsing job definitions from text.

    :param filename:
        Name of the file *text* came from (used for error reporting
        and origin tracking).
    :param text:
        The RFC822-formatted job definitions to parse.
    :param provider:
        Provider to associate with each loaded job (stored on the
        job's ``_provider`` attribute).
    :raises PlugInError:
        If *text* cannot be parsed as RFC822 records, or if any record
        cannot be converted into a JobDefinition.
    """
    self._filename = filename
    self._job_list = []
    logger.debug(_("Loading jobs definitions from %r..."), filename)
    try:
        records = load_rfc822_records(
            text, source=FileTextSource(filename))
    except RFC822SyntaxError as exc:
        # Wrap parse errors in PlugInError so callers handle one type
        raise PlugInError(
            _("Cannot load job definitions from {!r}: {}").format(
                filename, exc))
    for record in records:
        try:
            job = JobDefinition.from_rfc822_record(record)
        except ValueError as exc:
            raise PlugInError(
                _("Cannot define job from record {!r}: {}").format(
                    record, exc))
        else:
            # Tie each successfully-created job to the owning provider
            job._provider = provider
            self._job_list.append(job)
            logger.debug(_("Loaded %r"), job)
def run_local_job(self, checksum, env):
    """
    Run a job and interpret the stdout as a job definition.

    :param checksum:
        The checksum of the job to execute
    :param env:
        Environment to execute the job in.
    :returns:
        A list of job definitions that were parsed out of the output.
        Empty if the output had a syntax error (logged, not raised).
    :raises LookupError:
        If the checksum does not match any known job
    """
    job = self.find_job(checksum)
    # Local jobs are shell snippets; run them through bash and capture
    # their stdout for parsing
    cmd = ["bash", "-c", job.command]
    output = subprocess.check_output(
        cmd, universal_newlines=True,
        env=self.modify_execution_environment(env))
    job_list = []
    # Attribute every parsed record back to the generating job
    source = JobOutputTextSource(job)
    try:
        record_list = load_rfc822_records(output, source=source)
    except RFC822SyntaxError as exc:
        # Best-effort: log and return what we have (nothing) rather
        # than aborting the whole run
        logging.error(_("Syntax error in job generated from %s: %s"),
                      job, exc)
    else:
        for record in record_list:
            job = JobDefinition.from_rfc822_record(record)
            job_list.append(job)
    return job_list
def _process_local_result(self, result):
    """
    Process the result of a 'local' job.

    Parses job definitions out of the result's IO log and stores each
    new job in the job list and job state map, discarding (and logging)
    any job whose name collides with an existing one.

    :param result:
        The job result whose IO log carries generated job definitions.
    """
    # First parse all records and create a list of new jobs (confusing
    # name, not a new list of jobs)
    new_job_list = [
        JobDefinition.from_rfc822_record(record)
        for record in self._gen_rfc822_records_from_io_log(result)
    ]
    # Then for each new job, add it to the job_list, unless it collides
    # with another job with the same name.
    for new_job in new_job_list:
        try:
            existing_job = self._job_state_map[new_job.name]
        except KeyError:
            logger.info("Storing new job %r", new_job)
            self._job_state_map[new_job.name] = JobState(new_job)
            self._job_list.append(new_job)
        else:
            # XXX: there should be a channel where such errors could be
            # reported back to the UI layer. Perhaps update_job_result()
            # could simply return a list of problems in a similar manner
            # how update_desired_job_list() does.
            # BUGFIX: use the module-level `logger` (as the info call
            # above does) instead of the root logger via logging.warning
            logger.warning(
                ("Local job %s produced job %r that collides with"
                 " an existing job %r, the new job was discarded"),
                result.job, new_job, existing_job)
def test_from_rfc822_record_full_record(self):
    """A fully-populated record maps every field onto the job."""
    job = JobDefinition.from_rfc822_record(self._full_record)
    for field in ('plugin', 'name', 'requires', 'command', 'description'):
        self.assertEqual(getattr(job, field), field)
def test_from_rfc822_record_min_record(self):
    """A minimal record leaves the optional fields as None."""
    job = JobDefinition.from_rfc822_record(self._min_record)
    self.assertEqual(job.plugin, "plugin")
    self.assertEqual(job.name, "name")
    for field in ('requires', 'command', 'description'):
        self.assertEqual(getattr(job, field), None)
def setUp(self):
    """Create a job with a mocked provider plus fake session paths."""
    self.job = JobDefinition({"name": "name", "environ": "foo bar froz"})
    provider = Mock()
    provider.extra_PYTHONPATH = None
    provider.extra_PATH = "value-of-extra-path"
    provider.CHECKBOX_SHARE = "checkbox-share-value"
    self.job._provider = provider
    self.session_dir = "session-dir-value"
    self.checkbox_data_dir = os.path.join(
        self.session_dir, "CHECKBOX_DATA")
def test_encode(self):
    """
    Verify that _get_persistance_subset() encodes only the fields that
    were actually defined on the job.
    """
    job = JobDefinition({
        'name': 'name',
        'plugin': 'plugin',
        'requires': "foo.bar == bar"})
    job_enc = job._get_persistance_subset()
    # Defined fields are encoded under the 'data' key
    self.assertEqual(job_enc['data']['plugin'], job.plugin)
    self.assertEqual(job_enc['data']['name'], job.name)
    self.assertEqual(job_enc['data']['requires'], job.requires)
    # Undefined fields must be absent from the encoded data.
    # BUGFIX: the original indexed job_enc directly (e.g.
    # job_enc['depends']) which raised KeyError vacuously because all
    # fields live under 'data' — the checks proved nothing.
    for absent_field in ('depends', 'description', 'command', 'origin'):
        with self.assertRaises(KeyError):
            job_enc['data'][absent_field]
def test_checksum_smoke(self):
    """Checksums are equal for identical jobs, distinct otherwise, and stable."""
    job1 = JobDefinition({"name": "name", "plugin": "plugin"})
    identical_to_job1 = JobDefinition({"name": "name", "plugin": "plugin"})
    # Two distinct but identical jobs share a checksum
    self.assertEqual(job1.get_checksum(), identical_to_job1.get_checksum())
    # A different definition produces a different checksum
    job2 = JobDefinition({"name": "other name", "plugin": "plugin"})
    self.assertNotEqual(job1.get_checksum(), job2.get_checksum())
    # The checksum is stable and does not change over time
    expected = ("ad137ba3654827cb07a254a55c5e2a8d"
                "aa4de6af604e84ccdbe9b7f221014362")
    self.assertEqual(job1.get_checksum(), expected)