def test_invalid_action_node_with_master_context(self):
    master_config = dict(
        nodes=[dict(
            name="node0",
            hostname="node0",
        )],
        node_pools=[dict(
            name="nodepool0",
            nodes=["node0"],
        )])
    test_config = dict(jobs=[
        dict(
            name="test_job",
            namespace='test_namespace',
            node="node0",
            schedule="daily 00:30:00 ",
            actions=[
                dict(name="action", node="nodepool1", command="command")
            ],
            cleanup_action=dict(command="command"),
        )
    ])
    expected_message = "Unknown node name nodepool1 at test_namespace.NamedConfigFragment.jobs.Job.test_job.actions.Action.action.node"
    exception = assert_raises(
        ConfigError,
        validate_fragment,
        'test_namespace',
        test_config,
        master_config,
    )
    assert_in(expected_message, str(exception))

def test_bad_requires(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='interval 20s',
                actions=[dict(name='action', command='cmd')]
            ),
            dict(
                name='test_job1',
                node='node0',
                schedule='interval 20s',
                actions=[
                    dict(
                        name='action1',
                        command='cmd',
                        requires=['action']
                    )
                ]
            )
        ],
        **BASE_CONFIG
    )
    expected_message = (
        'jobs.MASTER.test_job1.action1 has a dependency '
        '"action" that is not in the same job!'
    )
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_message, str(exception))

def test_invalid_nested_node_pools(self):
    test_config = dict(
        nodes=[
            dict(name='node0', hostname='node0'),
            dict(name='node1', hostname='node1')
        ],
        node_pools=[
            dict(name='pool0', nodes=['node1']),
            dict(name='pool1', nodes=['node0', 'pool0'])
        ],
        jobs=[
            dict(
                name='test_job0',
                node='pool1',
                schedule='constant',
                actions=[dict(name='action', command='cmd')]
            )
        ]
    )
    expected_msg = "NodePool pool1 contains other NodePools: pool0"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_msg, str(exception))

def test_circular_dependency(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='interval 20s',
                actions=[
                    dict(
                        name='action1',
                        command='cmd',
                        requires=['action2']
                    ),
                    dict(
                        name='action2',
                        command='cmd',
                        requires=['action1']
                    ),
                ]
            )
        ],
        **BASE_CONFIG
    )
    expect = "Circular dependency in job.MASTER.test_job0: action1 -> action2"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expect, str(exception))

def test_invalid_node_pool_config(self):
    test_config = dict(
        nodes=[
            dict(name='node0', hostname='node0'),
            dict(name='node1', hostname='node1')
        ],
        node_pools=[
            dict(name='pool0', hostname=['node1']),
            dict(name='pool1', nodes=['node0', 'pool0'])
        ],
        jobs=[
            dict(
                name='test_job0',
                node='pool1',
                schedule='interval 20s',
                actions=[dict(name='action', command='cmd')]
            )
        ]
    )
    expected_msg = "NodePool pool0 is missing options"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_msg, str(exception))

def test_invalid_action_node_with_master_context(self):
    master_config = dict(
        nodes=[dict(
            name="node0",
            hostname="node0",
        )],
        node_pools=[dict(
            name="nodepool0",
            nodes=["node0"],
        )]
    )
    test_config = dict(
        jobs=[
            dict(
                name="test_job",
                namespace='test_namespace',
                node="node0",
                schedule="interval 20s",
                actions=[dict(name="action", node="nodepool1", command="command")],
                cleanup_action=dict(command="command"),
            )
        ]
    )
    expected_message = "Unknown node name nodepool1 at test_namespace.NamedConfigFragment.jobs.Job.test_job.actions.Action.action.node"
    exception = assert_raises(
        ConfigError,
        validate_fragment,
        'test_namespace',
        test_config,
        master_config,
    )
    assert_in(expected_message, str(exception))

def test_request_error(self):
    exception = assert_raises(
        client.RequestError,
        self.client.request,
        '/jobs',
    )
    assert_in(self.url, str(exception))

def test_valid_known_hosts_file_missing(self):
    exception = assert_raises(
        ConfigError,
        config_parse.valid_known_hosts_file,
        '/bogus/path',
        self.context,
    )
    assert_in('Known hosts file /bogus/path', str(exception))

def test_valid_identity_file_missing_private_key(self):
    exception = assert_raises(
        ConfigError,
        config_parse.valid_identity_file,
        '/file/not/exist',
        self.context,
    )
    assert_in("Private key file", str(exception))

def test_missing_dir(self):
    exception = assert_raises(
        ConfigError,
        valid_output_stream_dir,
        'bogus-dir',
        NullConfigContext,
    )
    assert_in("is not a directory", str(exception))

def test_build_url_request_with_data(self):
    data = {'param': 'is_set', 'other': 1}
    request = client.build_url_request(self.url, data)
    assert request.has_header('User-agent')
    assert_equal(request.get_method(), 'POST')
    assert_equal(request.get_full_url(), self.url)
    assert_in('param=is_set', request.data.decode())
    assert_in('other=1', request.data.decode())

def test_valid_time_delta_invalid(self):
    exception = assert_raises(
        ConfigError,
        config_utils.valid_time_delta,
        'no time',
        self.context,
    )
    assert_in('not a valid time delta: no time', str(exception))

def test_get_url_from_identifier_no_match(self):
    exc = assert_raises(
        ValueError,
        get_object_type_from_identifier,
        self.index,
        'MASTER.namec',
    )
    assert_in('namec', str(exc))

def test_valid_identity_files_missing_public_key(self):
    filename = self.private_file.name
    exception = assert_raises(
        ConfigError,
        config_parse.valid_identity_file,
        filename,
        self.context,
    )
    assert_in("Public key file", str(exception))

def test_validator_unknown_variable_error(self):
    template = "The {one} thing I {seven} is {unknown}"
    exception = assert_raises(
        ConfigError,
        self.validator,
        template,
        NullConfigContext,
    )
    assert_in("Unknown context variable", str(exception))

def test_get_scribed_logger(self):
    log = get_scribed_logger(
        "unit_test_scribed",
        logging.INFO,
        fmt=self.SIMPLE_FORMAT,
        clogger_object=self.logger,
    )
    log.info("This is a test")
    T.assert_in("This is a test", self.logger.list_lines("unit_test_scribed"))
    self.logger.clear_lines("unit_test_scribed")

    # test that we don't double-add the handler
    log = get_scribed_logger(
        "unit_test_scribed",
        logging.INFO,
        fmt=self.SIMPLE_FORMAT,
        clogger_object=self.logger,
    )
    log.info("This is a test")
    T.assert_equal(
        1,
        len([
            message
            for message in self.logger.list_lines("unit_test_scribed")
            if message == "This is a test"
        ]),
    )

def test_validate_with_none(self):
    expected_msg = "A StubObject is required"
    exception = assert_raises(
        ConfigError,
        self.validator.validate,
        None,
        config_utils.NullConfigContext,
    )
    assert_in(expected_msg, str(exception))

def test_check_if_pidfile_exists_file_exists(self):
    self.pidfile.__exit__(None, None, None)
    with open(self.filename, 'w') as fh:
        fh.write('123\n')

    with mock.patch.object(PIDFile, 'is_process_running') as mock_method:
        mock_method.return_value = True
        exception = assert_raises(SystemExit, PIDFile, self.filename)
    assert_in('Daemon running as 123', str(exception))

def test_remove(self):
    # In cache
    fh_wrapper = self.manager.open(self.file1.name)
    assert_in(fh_wrapper.name, self.manager.cache)
    self.manager.remove(fh_wrapper)
    assert_not_in(fh_wrapper.name, self.manager.cache)

    # Not in cache
    self.manager.remove(fh_wrapper)
    assert_not_in(fh_wrapper.name, self.manager.cache)

def test_build_action_run_collection(self):
    collection = ActionRunFactory.build_action_run_collection(
        self.job_run,
        self.action_runner,
    )
    assert_equal(collection.action_graph, self.action_graph)
    assert_in('act1', collection.run_map)
    assert_in('act2', collection.run_map)
    assert_length(collection.run_map, 2)
    assert_equal(collection.run_map['act1'].action_name, 'act1')

def test_invalid_named_update(self):
    test_config = dict(bozray=None)
    expected_message = "Unknown keys in NamedConfigFragment : bozray"
    exception = assert_raises(
        ConfigError,
        validate_fragment,
        'foo',
        test_config,
    )
    assert_in(expected_message, str(exception))

def test__str__(self):
    self.collection._is_run_blocked = lambda r: r.action_name != 'cleanup'
    expected = [
        "ActionRunCollection",
        "second_name(scheduled:blocked)",
        "action_name(scheduled:blocked)",
        "cleanup(scheduled)",
    ]
    for expectation in expected:
        assert_in(expectation, str(self.collection))

def test_invalid(self):
    exception = assert_raises(
        ConfigError,
        self.validator,
        'c',
        self.context,
    )
    assert_in(
        'Value at is not in %s: ' % str(set(self.enum)),
        str(exception),
    )

def test_no_actions(self):
    test_config = dict(
        jobs=[dict(name='test_job0', node='node0', schedule='constant')],
        **BASE_CONFIG
    )
    expected_message = "Job test_job0 is missing options: actions"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_message, str(exception))

def test_overlap_node_and_node_pools(self):
    tron_config = dict(
        nodes=[
            dict(name="sameName", hostname="localhost"),
        ],
        node_pools=[
            dict(name="sameName", nodes=["sameNode"]),
        ],
    )
    expected_msg = "Node and NodePool names must be unique sameName"
    exception = assert_raises(ConfigError, valid_config, tron_config)
    assert_in(expected_msg, str(exception))

def test_open(self):
    # Not yet in cache
    fh_wrapper = self.manager.open(self.file1.name)
    assert_in(fh_wrapper.name, self.manager.cache)

    # Should now be in cache
    fh_wrapper2 = self.manager.open(self.file1.name)

    # Same wrapper
    assert_equal(fh_wrapper, fh_wrapper2)

    # Different wrapper
    assert_not_equal(fh_wrapper, self.manager.open(self.file2.name))

def test_build_new_run(self):
    autospec_method(self.run_collection.remove_old_runs)
    run_time = datetime.datetime(2012, 3, 14, 15, 9, 26)
    mock_job = build_mock_job()
    job_run = self.run_collection.build_new_run(
        mock_job,
        run_time,
        self.mock_node,
    )
    assert_in(job_run, self.run_collection.runs)
    self.run_collection.remove_old_runs.assert_called_with()
    assert job_run.run_num == 5
    assert job_run.job_name == mock_job.get_name.return_value

def test_build_new_run(self):
    autospec_method(self.run_collection.remove_old_runs)
    run_time = datetime.datetime(2012, 3, 14, 15, 9, 26)
    mock_job = build_mock_job()
    job_run = self.run_collection.build_new_run(
        mock_job,
        run_time,
        self.mock_node,
    )
    assert_in(job_run, self.run_collection.runs)
    self.run_collection.remove_old_runs.assert_called_with()
    assert job_run.run_num == 6
    assert job_run.job_name == mock_job.get_name.return_value

def test_build_new_run_manual(self):
    autospec_method(self.run_collection.remove_old_runs)
    run_time = datetime.datetime(2012, 3, 14, 15, 9, 26)
    mock_job = build_mock_job()
    job_run = self.run_collection.build_new_run(
        mock_job,
        run_time,
        self.mock_node,
        True,
    )
    assert_in(job_run, self.run_collection.runs)
    self.run_collection.remove_old_runs.assert_called_with()
    assert_equal(job_run.run_num, 5)
    assert job_run.manual

def test_no_actions(self):
    test_config = dict(
        jobs=[
            dict(name='test_job0', node='node0', schedule='interval 20s')
        ],
        **BASE_CONFIG
    )
    expected_message = "Job test_job0 is missing options: actions"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_message, str(exception))

def test_cleanup_natural(self):
    FileHandleManager.set_max_idle_time(1)
    fh_wrapper1 = self.manager.open(self.file1.name)
    fh_wrapper2 = self.manager.open(self.file2.name)
    fh_wrapper1.write("Some things")
    time.sleep(1.5)
    fh_wrapper2.write("Other things.")
    assert_not_in(fh_wrapper1.name, self.manager.cache)
    assert_in(fh_wrapper2.name, self.manager.cache)

    # Now that 1 is closed, try writing again
    fh_wrapper1.write("Some things")
    assert_in(fh_wrapper1.name, self.manager.cache)
    assert not fh_wrapper1._fh.closed

def test_empty_actions(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='daily 00:30:00 ',
                actions=None,
            )
        ],
        **BASE_CONFIG
    )
    expected_message = "Value at config.jobs.Job.test_job0.actions"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_message, str(exception))

def test_config_cleanup_name_collision(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='daily 00:30:00',
                actions=[
                    dict(name=CLEANUP_ACTION_NAME, command='cmd'),
                ],
            )
        ],
        **BASE_CONFIG
    )
    expected_message = "config.jobs.Job.test_job0.actions.Action.cleanup.name"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_message, str(exception))

def test_dupe_names(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='daily 00:30:00',
                actions=[
                    dict(name='action', command='cmd'),
                    dict(name='action', command='cmd'),
                ],
            )
        ],
        **BASE_CONFIG
    )
    expected = "Duplicate name action at config.jobs.Job.test_job0.actions"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected, str(exception))

def test_config_cleanup_action_name(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='daily 00:30:00',
                actions=[
                    dict(name='action', command='cmd'),
                ],
                cleanup_action=dict(name='gerald', command='cmd'),
            )
        ],
        **BASE_CONFIG
    )
    expected_msg = "Cleanup actions cannot have custom names"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_msg, str(exception))

def test_circular_dependency(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='daily 00:30:00',
                actions=[
                    dict(name='action1', command='cmd', requires=['action2']),
                    dict(name='action2', command='cmd', requires=['action1']),
                ],
            )
        ],
        **BASE_CONFIG
    )
    expect = "Circular dependency in job.MASTER.test_job0: action1 -> action2"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expect, str(exception))

def test_empty_actions(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='interval 20s',
                actions=None,
            )
        ],
        **BASE_CONFIG
    )
    expected_message = "Value at config.jobs.Job.test_job0.actions"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_message, str(exception))

def test_validate_job_no_actions(self):
    job_config = dict(
        name="job_name",
        node="localhost",
        schedule="constant",
        actions=[],
    )
    config_context = config_utils.ConfigContext(
        'config',
        ['localhost'],
        None,
        None,
    )
    expected_msg = "Required non-empty list at config.Job.job_name.actions"
    exception = assert_raises(
        ConfigError,
        valid_job,
        job_config,
        config_context,
    )
    assert_in(expected_msg, str(exception))

def test_validate_job_no_actions(self):
    job_config = dict(
        name="job_name",
        node="localhost",
        schedule="daily 00:30:00",
        actions=[],
    )
    config_context = config_utils.ConfigContext(
        'config',
        ['localhost'],
        None,
        None,
    )
    expected_msg = "Required non-empty list at config.Job.job_name.actions"
    exception = assert_raises(
        ConfigError,
        valid_job,
        job_config,
        config_context,
    )
    assert_in(expected_msg, str(exception))

def test_config_cleanup_name_collision(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='interval 20s',
                actions=[
                    dict(name=CLEANUP_ACTION_NAME, command='cmd'),
                ],
            )
        ],
        **BASE_CONFIG
    )
    expected_message = "config.jobs.Job.test_job0.actions.Action.cleanup.name"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_message, str(exception))

def test_cleanup_many(self):
    fh_wrappers = [
        self.manager.open(self.file1.name),
        self.manager.open(self.file2.name),
        self.manager.open(NamedTemporaryFile('r').name),
        self.manager.open(NamedTemporaryFile('r').name),
        self.manager.open(NamedTemporaryFile('r').name),
    ]
    for i, fh_wrapper in enumerate(fh_wrappers):
        fh_wrapper.last_accessed = 123456 + i

    def time_func():
        return 123460.1

    self.manager.cleanup(time_func)
    assert_equal(len(self.manager.cache), 2)
    for fh_wrapper in fh_wrappers[:3]:
        assert_not_in(fh_wrapper.name, self.manager.cache)
    for fh_wrapper in fh_wrappers[3:]:
        assert_in(fh_wrapper.name, self.manager.cache)

def test_config_cleanup_action_name(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='interval 20s',
                actions=[
                    dict(name='action', command='cmd'),
                ],
                cleanup_action=dict(name='gerald', command='cmd'),
            )
        ],
        **BASE_CONFIG
    )
    expected_msg = "Cleanup actions cannot have custom names"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected_msg, str(exception))

def test_dupe_names(self):
    test_config = dict(
        jobs=[
            dict(
                name='test_job0',
                node='node0',
                schedule='interval 20s',
                actions=[
                    dict(name='action', command='cmd'),
                    dict(name='action', command='cmd'),
                ],
            )
        ],
        **BASE_CONFIG
    )
    expected = "Duplicate name action at config.jobs.Job.test_job0.actions"
    exception = assert_raises(
        ConfigError,
        valid_config,
        test_config,
    )
    assert_in(expected, str(exception))

def test_set_item_no_conflict(self):
    self.dict['a'] = 'something'
    assert_in('a', self.dict)