def test_post_validation_success(self):
    os.environ['SSH_AUTH_SOCK'] = 'something'
    config = config_parse.valid_ssh_options.validate(
        self.config,
        self.context,
    )
    assert_equal(config.agent, True)
def test_perform_with_params(self):
    assert_equal(self.proxy.perform('equals')(2), [False, True, False])
    sometimes = ['sometimes'] * 3
    assert_equal(
        self.proxy.perform('equals')(3, sometimes=True),
        sometimes,
    )
def test_configure_from_dict(self):
    config_data = {
        'scribe_host': 'example.com',
        'scribe_port': '5555',
    }
    config.configure_from_dict(config_data)
    T.assert_equal(config.scribe_host, config_data['scribe_host'])
def test_from_config(self):
    config = mock.Mock()
    runner_factory = actioncommand.SubprocessActionRunnerFactory.from_config(
        config,
    )
    assert_equal(runner_factory.status_path, config.remote_status_path)
    assert_equal(runner_factory.exec_path, config.remote_exec_path)
def test_attributes(self):
    expected = make_named_tron_config(
        jobs={
            'test_job': make_job(
                name="test_job",
                namespace='test_namespace',
                schedule=ConfigIntervalScheduler(
                    timedelta=datetime.timedelta(0, 20),
                    jitter=None,
                ),
                expected_runtime=datetime.timedelta(1),
            )
        }
    )
    test_config = validate_fragment(
        'test_namespace',
        dict(
            jobs=[
                dict(
                    name="test_job",
                    namespace='test_namespace',
                    node="node0",
                    schedule="interval 20s",
                    actions=[dict(name="action", command="command")],
                    cleanup_action=dict(command="command"),
                )
            ]
        )
    )
    assert_equal(test_config, expected)
def test_wildcards(self):
    cfg = parse_groc('every day')
    assert_equal(cfg.ordinals, None)
    assert_equal(cfg.monthdays, None)
    assert_equal(cfg.weekdays, None)
    assert_equal(cfg.months, None)
    assert_equal(cfg.timestr, '00:00')
def test_read_raw_config(self):
    name = 'name'
    path = os.path.join(self.temp_dir, name)
    manager.write(path, self.content)
    self.manifest.get_file_name.return_value = path
    config = self.manager.read_raw_config(name)
    assert_equal(config, yaml.dump(self.content))
def test_repr(self, mock_many):
    result = self.adapter.get_repr()
    assert_equal(result['name'], self.pool.get_name.return_value)
    mock_many.assert_called_with(
        adapter.NodeAdapter,
        self.pool.get_nodes.return_value,
    )
def test_get_url_from_identifier_action_run(self):
    identifier = get_object_type_from_identifier(
        self.index,
        'MASTER.nameb.7.run',
    )
    assert_equal(identifier.url, '/api/jobs/MASTER.nameb/7/run')
    assert_equal(identifier.type, TronObjectType.action_run)
def test_get_url_from_identifier_job(self):
    identifier = get_object_type_from_identifier(
        self.index,
        'MASTER.namea',
    )
    assert_equal(identifier.url, '/api/jobs/MASTER.namea')
    assert_equal(identifier.type, TronObjectType.job)
def assert_validation(self, schedule, expected, mock_schedulers):
    context = config_utils.NullConfigContext
    config = schedule_parse.valid_schedule(schedule, context)
    mock_schedulers.__getitem__.assert_called_with('cron')
    func = mock_schedulers.__getitem__.return_value
    assert_equal(config, func.return_value)
    func.assert_called_with(expected, context)
def test_from_config(self):
    name = 'the pool name'
    nodes = [create_mock_node(), create_mock_node()]
    config = mock.Mock(name=name)
    new_pool = node.NodePool.from_config(config, nodes)
    assert_equal(new_pool.name, config.name)
    assert_equal(new_pool.nodes, nodes)
def test_cancel_schedules_a_new_run(self):
    config = BASIC_CONFIG + dedent(
        """
        jobs:
          - name: "a_job"
            node: local
            schedule: "daily 05:00:00"
            actions:
              - name: "first_action"
                command: "echo OK"
        """
    )
    self.start_with_config(config)
    job_name = 'MASTER.a_job'
    job_url = self.client.get_url(job_name)
    self.sandbox.tronctl('cancel', '%s.0' % job_name)

    def wait_on_cancel():
        return len(self.client.job(job_url)['runs']) == 2

    sandbox.wait_on_sandbox(wait_on_cancel)
    run_states = [run['state'] for run in self.client.job(job_url)['runs']]
    expected = [
        actionrun.ActionRun.SCHEDULED,
        actionrun.ActionRun.CANCELLED,
    ]
    assert_equal(run_states, expected)
def test_failure_on_multi_step_job_doesnt_wedge_tron(self):
    config = BASIC_CONFIG + dedent(
        """
        jobs:
          - name: "random_failure_job"
            node: local
            queueing: true
            schedule: "constant"
            actions:
              - name: "fa"
                command: "sleep 0.1; failplz"
              - name: "sa"
                command: "echo 'you will never see this'"
                requires: [fa]
        """
    )
    self.start_with_config(config)
    job_url = self.client.get_url('MASTER.random_failure_job')

    def wait_on_random_failure_job():
        return len(self.client.job(job_url)['runs']) >= 4

    sandbox.wait_on_sandbox(wait_on_random_failure_job)
    job_runs = self.client.job(job_url)['runs']
    expected = [actionrun.ActionRun.FAILED for _ in range(3)]
    assert_equal([run['state'] for run in job_runs[-3:]], expected)
def test_get_next_to_finish_none(self):
    next_run = self.run_collection.get_next_to_finish(node="seven")
    assert_equal(next_run, None)

    self.job_runs[1].state = None
    next_run = self.run_collection.get_next_to_finish()
    assert_equal(next_run, None)
def test_trond_restart_job_with_run_history(self):
    config = BASIC_CONFIG + textwrap.dedent(
        """
        jobs:
          - name: fast_job
            node: local
            schedule: constant
            actions:
              - name: single_act
                command: "sleep 20 && echo good"
        """
    )
    self.start_with_config(config)
    action_run_url = self.client.get_url('MASTER.fast_job.0.single_act')
    sandbox.wait_on_state(
        self.client.action_runs,
        action_run_url,
        actionrun.ActionRun.RUNNING,
    )
    self.restart_trond()
    # The run that was interrupted by the restart is marked UNKNOWN,
    # and a new run should be scheduled and start running.
    assert_equal(
        self.client.job_runs(action_run_url)['state'],
        actionrun.ActionRun.UNKNOWN,
    )
    next_run_url = self.client.get_url('MASTER.fast_job.-1.single_act')
    sandbox.wait_on_state(
        self.client.action_runs,
        next_run_url,
        actionrun.ActionRun.RUNNING,
    )
def test_next_day_weekdays(self):
    time_spec = trontimespec.TimeSpecification(weekdays=[1, 5])
    gen = time_spec.next_day(14, 2012, 3)
    assert_equal(list(gen), [16, 19, 23, 26, 30])
    gen = time_spec.next_day(1, 2012, 3)
    assert_equal(list(gen), [2, 5, 9, 12, 16, 19, 23, 26, 30])
def test_next_day_monthdays(self):
    time_spec = trontimespec.TimeSpecification(monthdays=[5, 10, 15])
    gen = time_spec.next_day(14, 2012, 3)
    assert_equal(list(gen), [15])
    gen = time_spec.next_day(1, 2012, 3)
    assert_equal(list(gen), [5, 10, 15])
def test__getitem__last_success(self):
    item = self.context["last_success#day-1"]
    expected_date = self.last_success.run_time - datetime.timedelta(days=1)
    assert_equal(item, str(expected_date.day))

    item = self.context["last_success#shortdate"]
    assert_equal(item, "2012-03-14")
def test_notify(self):
    handler = mock.MagicMock()
    self.obs.attach(['a', 'b'], handler)
    self.obs.notify('a')
    assert_equal(len(handler.handler.mock_calls), 1)
    self.obs.notify('b')
    assert_equal(len(handler.handler.mock_calls), 2)
def test_attach(self):
    def func():
        return 1

    self.obs.attach('a', func)
    assert_equal(len(self.obs._observers), 1)
    assert_equal(self.obs._observers['a'], [func])
def test_get_file_mapping(self):
    file_mapping = {
        'one': 'a.yaml',
        'two': 'b.yaml',
    }
    manager.write(self.manifest.filename, file_mapping)
    assert_equal(self.manifest.get_file_mapping(), file_mapping)
def test_parse_no_month(self):
    cfg = parse_groc('1st,2nd,3rd,10th day at 00:00')
    assert_equal(cfg.ordinals, None)
    assert_equal(cfg.monthdays, {1, 2, 3, 10})
    assert_equal(cfg.weekdays, None)
    assert_equal(cfg.months, None)
    assert_equal(cfg.timestr, '00:00')
def test_close_with_write(self):
    # Test close with a write
    self.fh_wrapper.write("some things")
    self.fh_wrapper.close()
    assert_equal(self.fh_wrapper._fh, NullFileHandle)
    assert_equal(self.fh_wrapper.manager, self.manager)
    # This is somewhat coupled
    assert_not_in(self.fh_wrapper, self.manager.cache)
def test_start_action_runs(self):
    startable_runs = [
        mock.create_autospec(actionrun.ActionRun) for _ in range(3)
    ]
    self.job_run.action_runs.get_startable_action_runs = (
        lambda: startable_runs
    )
    started_runs = self.job_run._start_action_runs()
    assert_equal(started_runs, startable_runs)
def test_save(self):
    assert self.buffer.save(1, 2)
    assert not self.buffer.save(1, 3)
    assert not self.buffer.save(1, 4)
    assert not self.buffer.save(1, 5)
    assert not self.buffer.save(1, 6)
    assert self.buffer.save(1, 7)
    assert_equal(self.buffer.buffer[1], 7)
def test_display_scheduler_with_jitter(self):
    source = {
        'value': '5 minutes',
        'type': 'interval',
        'jitter': ' (+/- 2 min)',
    }
    result = display.display_scheduler(source)
    assert_equal(result, 'interval 5 minutes%s' % (source['jitter']))
def test_build_url_request_with_data(self):
    data = {'param': 'is_set', 'other': 1}
    request = client.build_url_request(self.url, data)
    assert request.has_header('User-agent')
    assert_equal(request.get_method(), 'POST')
    assert_equal(request.get_full_url(), self.url)
    assert_in('param=is_set', request.data.decode())
    assert_in('other=1', request.data.decode())
def test_parse_monthly(self):
    for test_str in ('1st day', '1st day of month'):
        cfg = parse_groc(test_str)
        assert_equal(cfg.ordinals, None)
        assert_equal(cfg.monthdays, {1})
        assert_equal(cfg.weekdays, None)
        assert_equal(cfg.months, None)
        assert_equal(cfg.timestr, '00:00')
def test_parse(
    self, mock_dow, mock_month, mock_monthday, mock_hour, mock_min,
):
    line = '* * * * *'
    actual = crontab.parse_crontab(line)
    assert_equal(actual['minutes'], mock_min.return_value)
    assert_equal(actual['hours'], mock_hour.return_value)
    assert_equal(actual['monthdays'], mock_monthday.return_value)
    assert_equal(actual['months'], mock_month.return_value)
    assert_equal(actual['weekdays'], mock_dow.return_value)
def test_pad_sequence_empty(self):
    expected = ["a", "a"]
    assert_equal(schedule_parse.pad_sequence([], 2, "a"), expected)
def test_get_config_content_new(self):
    self.manager.__contains__.return_value = False
    content = self.controller._get_config_content('name')
    assert_equal(content, self.controller.DEFAULT_NAMED_CONFIG)
    assert not self.manager.read_raw_config.call_count
def test_delete_config_hash_mismatch(self):
    name, content, config_hash = None, "", mock.Mock()
    error = self.controller.delete_config(name, content, config_hash)
    assert_equal(error, "Configuration has changed. Please try again.")
def test_get_job_url_for_action_run(self):
    url = client.get_job_url('MASTER.name.1.act')
    assert_equal(url, '/api/jobs/MASTER.name/1/act')
def test_create_task_with_configuration(self, mock_task):
    cluster = MesosCluster(
        'mesos-cluster-a.me',
        default_volumes=[
            {
                'container_path': '/tmp',
                'host_path': '/host',
                'mode': 'RO',
            },
            {
                'container_path': '/other',
                'host_path': '/other',
                'mode': 'RW',
            },
        ],
        dockercfg_location='some_place',
        offer_timeout=202,
    )
    mock_serializer = mock.MagicMock()
    task = cluster.create_task(
        action_run_id='action_c',
        command='echo hi',
        cpus=1,
        mem=10,
        disk=20,
        constraints=[],
        docker_image='container:latest',
        docker_parameters=[],
        env={'TESTING': 'true'},
        # This should override the default volume for /tmp
        extra_volumes=[
            {
                'container_path': '/tmp',
                'host_path': '/custom',
                'mode': 'RW',
            },
        ],
        serializer=mock_serializer,
    )

    cluster.runner.TASK_CONFIG_INTERFACE.assert_called_once_with(
        name='action_c',
        cmd='echo hi',
        cpus=1,
        mem=10,
        disk=20,
        constraints=[],
        image='container:latest',
        docker_parameters=[],
        environment={'TESTING': 'true'},
        volumes=[
            {
                'container_path': '/tmp',
                'host_path': '/custom',
                'mode': 'RW',
            },
            {
                'container_path': '/other',
                'host_path': '/other',
                'mode': 'RW',
            },
        ],
        uris=['some_place'],
        offer_timeout=202,
    )
    assert_equal(task, mock_task.return_value)
    mock_task.assert_called_once_with(
        'action_c',
        cluster.runner.TASK_CONFIG_INTERFACE.return_value,
        mock_serializer,
    )
def test_get_url_from_identifier_job_no_namespace_not_master(self):
    identifier = get_object_type_from_identifier(self.index, 'nameg')
    assert_equal(identifier.url, '/api/jobs/OTHER.nameg')
    assert_equal(identifier.type, TronObjectType.job)
def test_set_enabled_on_already(self):
    cluster = MesosCluster('mesos-cluster-a.me', enabled=True)
    cluster.set_enabled(True)
    assert_equal(cluster.enabled, True)
    # Runner should only have been created once
    assert_equal(self.mock_runner_cls.call_count, 1)
def test_get_cluster_repeated_mesos_address(self):
    first = MesosClusterRepository.get_cluster('master-a.com')
    second = MesosClusterRepository.get_cluster('master-a.com')
    assert_equal(first, second)
    assert_equal(self.cluster_cls.call_count, 1)
def test_request_success(self):
    self.mock_urlopen.return_value = build_file_mock(b'{"ok": "ok"}')
    response = client.request(self.url)
    expected = client.Response(None, None, {'ok': 'ok'})
    assert_equal(response, expected)
def test_request_url_error(self, _):
    self.mock_urlopen.side_effect = URLError('broke')
    response = client.request(self.url)
    expected = client.Response(client.URL_ERROR, 'broke', None)
    assert_equal(response, expected)
def test_load_response_content_decode_error(self, _):
    # Invalid JSON should produce a DECODE_ERROR with the raw content
    content = b'not:valid:json'
    http_response = build_file_mock(content)
    response = client.load_response_content(http_response)
    assert_equal(response.error, client.DECODE_ERROR)
    assert_equal(response.content, content.decode('utf-8'))
def test_get_job_url_for_job(self):
    url = client.get_job_url('MASTER.name')
    assert_equal(url, '/api/jobs/MASTER.name')
def test_request_success(self):
    ok_response = {'ok': 'ok'}
    client.request.return_value = client.Response(None, None, ok_response)
    response = self.client.request('/jobs')
    assert_equal(response, ok_response)
def test_end_to_end_basic(self):
    self.start_with_config(SINGLE_ECHO_CONFIG)
    client = self.sandbox.client
    assert_equal(
        self.client.config('MASTER')['config'],
        SINGLE_ECHO_CONFIG,
    )

    # reconfigure and confirm results
    second_config = DOUBLE_ECHO_CONFIG + TOUCH_CLEANUP_FMT
    self.sandbox.tronfig(second_config)
    assert_equal(client.config('MASTER')['config'], second_config)

    # reconfigure, by uploading a third configuration
    self.sandbox.tronfig(ALT_NAMESPACED_ECHO_CONFIG, name='ohce')
    self.sandbox.client.home()

    # run the job and check its output
    echo_job_name = 'MASTER.echo_job'
    job_url = client.get_url(echo_job_name)
    action_url = client.get_url('MASTER.echo_job.1.echo_action')
    self.sandbox.tronctl('start', echo_job_name)

    def wait_on_cleanup():
        return (
            len(client.job(job_url)['runs']) >= 2 and
            client.action_runs(action_url)['state'] ==
            actionrun.ActionRun.SUCCEEDED
        )

    sandbox.wait_on_sandbox(wait_on_cleanup)

    echo_action_run = client.action_runs(action_url)
    another_action_url = client.get_url(
        'MASTER.echo_job.1.another_echo_action',
    )
    other_act_run = client.action_runs(another_action_url)
    assert_equal(
        echo_action_run['state'],
        actionrun.ActionRun.SUCCEEDED,
    )
    assert_equal(echo_action_run['stdout'], ['Echo!'])
    assert_equal(
        other_act_run['state'],
        actionrun.ActionRun.FAILED,
    )

    now = datetime.datetime.now()
    stdout = now.strftime(
        'Today is %Y-%m-%d, which is the same as %Y-%m-%d',
    )
    assert_equal(other_act_run['stdout'], [stdout])

    job_runs_url = client.get_url('%s.1' % echo_job_name)
    assert_equal(
        client.job_runs(job_runs_url)['state'],
        actionrun.ActionRun.FAILED,
    )
def test_get_namespaces(self):
    result = self.controller.get_namespaces()
    self.manager.get_namespaces.assert_called_with()
    assert_equal(result, self.manager.get_namespaces.return_value)
def test_create_default_action_command_no_config(self):
    config = ()
    factory = actioncommand.create_action_runner_factory_from_config(
        config,
    )
    assert_equal(type(factory), actioncommand.NoActionRunnerFactory)
def test_get_config_content_old(self):
    self.manager.__contains__.return_value = True
    name = 'the_name'
    content = self.controller._get_config_content(name)
    assert_equal(content, self.manager.read_raw_config.return_value)
    self.manager.read_raw_config.assert_called_with(name)
def test_create_default_action_command(self):
    config = schema.ConfigActionRunner('none', None, None)
    factory = actioncommand.create_action_runner_factory_from_config(
        config,
    )
    assert_equal(type(factory), actioncommand.NoActionRunnerFactory)
def test_valid_config(self):
    config = self.validate('5 0 L * *')
    assert_equal(config.minutes, [5])
    assert_equal(config.months, None)
    assert_equal(config.monthdays, ['LAST'])
def test__eq__true(self):
    first = actioncommand.SubprocessActionRunnerFactory('a', 'b')
    second = actioncommand.SubprocessActionRunnerFactory('a', 'b')
    assert_equal(first, second)
def test_pad_negative_size(self):
    assert_equal(schedule_parse.pad_sequence([], -2, "a"), [])
def test_init(self):
    assert_equal(self.ac.state, ActionCommand.PENDING)
def test_init_no_serializer(self):
    ac = ActionCommand("action.1.do", "do")
    ac.write_stdout("something")
    ac.write_stderr("else")
    assert_equal(ac.stdout, filehandler.NullFileHandle)
    ac.done()
def test_started(self):
    assert self.ac.started()
    assert self.ac.start_time is not None
    assert_equal(self.ac.state, ActionCommand.RUNNING)
def test_pad_sequence_exact(self):
    expected = [0, 1, 2, 3]
    assert_equal(schedule_parse.pad_sequence(range(4), 4), expected)
def test_exited(self):
    self.ac.started()
    assert self.ac.exited(123)
    assert_equal(self.ac.exit_status, 123)
    assert self.ac.end_time is not None
def test_tronfig(self):
    self.start_with_config(SINGLE_ECHO_CONFIG)
    stdout, stderr = self.sandbox.tronfig()
    assert_equal(stdout.rstrip(), SINGLE_ECHO_CONFIG.rstrip())
def test_build_url_request_no_data(self):
    request = client.build_url_request(self.url, None)
    assert request.has_header('User-agent')
    assert_equal(request.get_method(), 'GET')
    assert_equal(request.get_full_url(), self.url)
def test_configure_tasks(self):
    cluster = MesosCluster(
        'mesos-cluster-a.me',
        default_volumes=[],
        dockercfg_location='first',
        offer_timeout=60,
    )
    assert_equal(cluster.default_volumes, [])
    assert_equal(cluster.dockercfg_location, 'first')
    assert_equal(cluster.offer_timeout, 60)

    expected_volumes = [{
        'container_path': '/tmp',
        'host_path': '/host',
        'mode': 'RO',
    }]
    cluster.configure_tasks(
        default_volumes=expected_volumes,
        dockercfg_location='second',
        offer_timeout=300,
    )
    assert_equal(cluster.default_volumes, expected_volumes)
    assert_equal(cluster.dockercfg_location, 'second')
    assert_equal(cluster.offer_timeout, 300)
def test_return_code(exc):
    assert_equal(exc.returncode, 1)