def test_create_job_startup_fails(self):
    """`job create --wait-until=RUNNING` should exit with EXIT_COMMAND_FAILURE
    when the post-create status check reports an error, and should write a
    single error line to stderr.
    """
    mock_context = FakeAuroraCommandContext()
    # A single multi-manager `with` replaces the deprecated contextlib.nested
    # (removed in Python 3); behavior is identical.
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context):
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.PENDING))
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.RUNNING))

        # We need to override the side_effect behavior of check_status in the context.
        def check_status_side_effect(*args):
            return self.create_error_response()

        mock_context.get_api("west").check_status.side_effect = check_status_side_effect
        api = mock_context.get_api('west')
        api.create_job.return_value = self.get_createjob_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            result = cmd.execute(['job', 'create', '--wait-until=RUNNING',
                                  'west/bozo/test/hello', fp.name])
            assert result == EXIT_COMMAND_FAILURE
            assert mock_context.get_out() == []
            assert mock_context.get_err() == [
                "Error occurred while creating job west/bozo/test/hello"]
def generic_test_successful_hook(self, command_hook):
    """Drive `job create` end-to-end and verify the given command hook ran
    both its pre- and post-execution phases.

    NOTE(review): another method with this exact name appears later in this
    file and silently shadows this one at class-definition time — confirm
    which definition is intended to survive.
    """
    mock_context = FakeAuroraCommandContext()
    with patch("apache.aurora.client.cli.jobs.Job.create_context",
               return_value=mock_context):
        mock_query = self.create_mock_query()
        # Monitor polls: INIT before creation, then RUNNING twice after.
        for status in (ScheduleStatus.INIT, ScheduleStatus.RUNNING, ScheduleStatus.RUNNING):
            mock_context.add_expected_status_query_result(
                self.create_mock_status_query_result(status))
        mock_context.get_api("west").check_status.side_effect = (
            lambda x: self.create_mock_status_query_result(ScheduleStatus.RUNNING))
        api = mock_context.get_api("west")
        api.create_job.return_value = self.get_createjob_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(["job", "create", "--wait-until=RUNNING",
                         "west/bozo/test/hello", fp.name])
            self.assert_create_job_called(api)
            self.assert_scheduler_called(api, mock_query, 1)
            assert command_hook.ran_pre
            assert command_hook.ran_post
def test_update_status_json(self):
    """`beta-update status --write-json` should print the update details as
    pretty-printed JSON and query the API with the parsed job key."""
    mock_context = FakeAuroraCommandContext()
    api = mock_context.get_api('west')
    api.query_job_updates.return_value = self.get_status_query_response()
    api.get_job_update_details.return_value = self.get_update_details_response()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('apache.aurora.client.cli.update.Update.create_context',
               return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        cmd = AuroraCommandLine()
        result = cmd.execute(["beta-update", "status", "--write-json", "west/mcc/test/hello"])
        assert result == EXIT_OK
        mock_context.get_api("west").query_job_updates.assert_called_with(jobKey=AuroraJobKey(
            'west', 'mcc', 'test', 'hello'))
        mock_context.get_api("west").get_job_update_details.assert_called_with('hello')
        print("============\n%s\n============" % mock_context.get_out_str())
        # NOTE(review): the expected literal below was reconstructed with
        # standard 2-space JSON indentation; the original line breaks were not
        # recoverable from the source — confirm against actual command output.
        assert mock_context.get_out_str() == textwrap.dedent("""\
            {
              "status": "ROLLING_FORWARD",
              "last_updated": 14114056030,
              "started": 1411404927,
              "update_events": [
                {
                  "status": "ROLLING_FORWARD",
                  "timestampMs": 1411404927
                },
                {
                  "status": "ROLL_FORWARD_PAUSED",
                  "timestampMs": 1411405000
                },
                {
                  "status": "ROLLING_FORWARD",
                  "timestampMs": 1411405100
                }
              ],
              "job": "west/mcc/test/hello",
              "updateId": "fake-update-identifier",
              "instance_update_events": [
                {
                  "action": "INSTANCE_UPDATING",
                  "instance": 1,
                  "timestamp": 1411404930
                },
                {
                  "action": "INSTANCE_UPDATING",
                  "instance": 2,
                  "timestamp": 1411404940
                },
                {
                  "action": "INSTANCE_UPDATED",
                  "instance": 1,
                  "timestamp": 1411404950
                },
                {
                  "action": "INSTANCE_UPDATED",
                  "instance": 2,
                  "timestamp": 1411404960
                }
              ]
            }""")
def test_update_status(self):
    """`beta-update status` (non-JSON) should print a human-readable summary
    of the update and its events."""
    mock_context = FakeAuroraCommandContext()
    api = mock_context.get_api('west')
    api.query_job_updates.return_value = self.get_status_query_response()
    api.get_job_update_details.return_value = self.get_update_details_response()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('apache.aurora.client.cli.update.Update.create_context',
               return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        cmd = AuroraCommandLine()
        result = cmd.execute(["beta-update", "status", "west/mcc/test/hello"])
        assert result == EXIT_OK
        assert mock_context.get_out() == [
            "Job: west/mcc/test/hello, UpdateID: fake-update-identifier",
            "Started YYYY-MM-DD HH:MM:SS, last updated: YYYY-MM-DD HH:MM:SS",
            "Current status: ROLLING_FORWARD",
            "Update events:",
            "  Status: ROLLING_FORWARD at YYYY-MM-DD HH:MM:SS",
            "  Status: ROLL_FORWARD_PAUSED at YYYY-MM-DD HH:MM:SS",
            "  Status: ROLLING_FORWARD at YYYY-MM-DD HH:MM:SS",
            "Instance events:",
            "  Instance 1 at YYYY-MM-DD HH:MM:SS: INSTANCE_UPDATING",
            "  Instance 2 at YYYY-MM-DD HH:MM:SS: INSTANCE_UPDATING",
            "  Instance 1 at YYYY-MM-DD HH:MM:SS: INSTANCE_UPDATED",
            "  Instance 2 at YYYY-MM-DD HH:MM:SS: INSTANCE_UPDATED"]
        mock_context.get_api("west").query_job_updates.assert_called_with(jobKey=AuroraJobKey(
            'west', 'mcc', 'test', 'hello'))
def test_skip_hooks_in_create(self):
    """Run a test of create, with a hook that should forbid running create, but
    with a user who's covered by one of the hook skip exception rules - so it
    should succeed.

    NOTE(review): another method with this exact name appears later in this
    file and silently shadows this one — confirm which definition should remain.
    """
    GlobalCommandHookRegistry.reset()
    command_hook = HookForTesting(True)
    GlobalCommandHookRegistry.register_command_hook(command_hook)
    command_hook_two = SecondHookForTesting(False)
    GlobalCommandHookRegistry.register_command_hook(command_hook_two)
    mock_response = Mock()
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch("apache.aurora.client.cli.jobs.Job.create_context",
               return_value=mock_context), \
            patch("requests.get", return_value=mock_response), \
            patch("getpass.getuser", return_value="bozo"):
        # Skip-rules payload: rule "a" lets user "bozo" skip the named hooks
        # for job killall/create.
        mock_response.json.return_value = {
            "a": {
                "users": ["bozo", "clown"],
                "commands": {"job": ["killall", "create"]},
                "hooks": ["test_hook", "second"]
            },
            "b": {
                "commands": {"user": ["kick"]},
            }
        }
        mock_query = self.create_mock_query()
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.INIT))
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.RUNNING))
        mock_context.get_api("west").check_status.side_effect = (
            lambda x: self.create_mock_status_query_result(ScheduleStatus.RUNNING))
        api = mock_context.get_api("west")
        api.create_job.return_value = self.get_createjob_response()
        GlobalCommandHookRegistry.setup("http://foo.bar")
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            result = cmd.execute(["job", "create", "--skip-hooks=second",
                                  "--wait-until=RUNNING",
                                  "west/bozo/test/hello", fp.name])
            assert result == 0
            self.assert_create_job_called(api)
            self.assert_scheduler_called(api, mock_query, 1)
            assert command_hook.ran_pre
            assert command_hook.ran_post
def test_plugin_runs_in_create_job(self):
    """Run a test of the "create" command against a mocked-out API:
    Verifies that the creation command sends the right API RPCs, and performs
    the correct tests on the result, and that the registered plugin runs."""
    # Patch out create_context so everything is stubbed through a fake context.
    mock_context = FakeAuroraCommandContext()
    with patch('apache.aurora.client.cli.jobs.Job.create_context',
               return_value=mock_context):
        # The job monitor queries task status at least twice: once before the
        # job is created and once after, so queue up both results.
        mock_query = self.create_mock_query()
        for status in (ScheduleStatus.INIT, ScheduleStatus.RUNNING):
            mock_context.add_expected_status_query_result(
                self.create_mock_status_query_result(status))
        api = mock_context.get_api('west')
        api.create_job.return_value = self.get_createjob_response()
        # Invoke create as if it had been called from the command line.
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.register_plugin(BogusPlugin())
            # NOTE(review): this test passes '--wait_until' (underscore) while
            # sibling tests use '--wait-until' — confirm both spellings are
            # accepted by the CLI or whether this is a typo.
            cmd.execute(['job', 'create', '--bogosity=maximum', '--wait_until=RUNNING',
                         'west/bozo/test/hello', fp.name])
            # create_job must have been called exactly once with an AuroraConfig.
            self.assert_create_job_called(api)
            self.assert_scheduler_called(api, mock_query, 2)
            # The plugin must have propagated its option into the context.
            assert mock_context.bogosity == "maximum"
def test_create_job_with_failed_hook(self):
    """A registered hook whose pre-phase fails must abort `job create`:
    create_job is never called and the hook's post-phase never runs."""
    GlobalCommandHookRegistry.reset()
    command_hook = HookForTesting(False)
    GlobalCommandHookRegistry.register_command_hook(command_hook)
    mock_context = FakeAuroraCommandContext()
    with patch("apache.aurora.client.cli.jobs.Job.create_context",
               return_value=mock_context):
        for status in (ScheduleStatus.INIT, ScheduleStatus.RUNNING):
            mock_context.add_expected_status_query_result(
                self.create_mock_status_query_result(status))
        api = mock_context.get_api("west")
        api.create_job.return_value = self.get_createjob_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            result = cmd.execute(["job", "create", "--wait-until=RUNNING",
                                  "west/bozo/test/hello", fp.name])
            # NOTE(review): sibling tests compare against EXIT_* constants;
            # confirm the bare 1 here matches the intended exit constant.
            assert result == 1
            assert api.create_job.call_count == 0
            assert command_hook.ran_pre
            assert not command_hook.ran_post
def generic_test_successful_hook(self, command_hook):
    """Run `job create` and check that the supplied hook executed both its
    pre- and post-phases.

    NOTE(review): this method duplicates the name of an earlier definition in
    this file and shadows it (the earlier one also stubs check_status and
    queues an extra RUNNING result) — confirm which variant is intended.
    """
    mock_context = FakeAuroraCommandContext()
    with patch("apache.aurora.client.cli.jobs.Job.create_context",
               return_value=mock_context):
        mock_query = self.create_mock_query()
        for status in (ScheduleStatus.INIT, ScheduleStatus.RUNNING):
            mock_context.add_expected_status_query_result(
                self.create_mock_status_query_result(status))
        api = mock_context.get_api("west")
        api.create_job.return_value = self.get_createjob_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(["job", "create", "--wait-until=RUNNING",
                         "west/bozo/test/hello", fp.name])
            self.assert_create_job_called(api)
            self.assert_scheduler_called(api, mock_query, 1)
            assert command_hook.ran_pre
            assert command_hook.ran_post
def test_status_wildcard(self):
    """Test status using a wildcard. It should first call api.get_jobs, and
    then do a getTasksWithoutConfigs on each job."""
    mock_context = FakeAuroraCommandContext()
    mock_api = mock_context.get_api('west')
    mock_api.check_status.return_value = self.create_status_response()
    mock_api.get_jobs.return_value = self.create_getjobs_response()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('apache.aurora.client.cli.jobs.Job.create_context',
               return_value=mock_context), \
            patch('apache.aurora.client.cli.context.CLUSTERS', new=self.TEST_CLUSTERS):
        cmd = AuroraCommandLine()
        cmd.execute(['job', 'status', '*'])
    # Wildcard should have expanded to two jobs, so there should be two calls
    # to check_status.
    assert mock_api.check_status.call_count == 2
    # Hoist the repeated call_args_list lookups into locals for readability.
    first_key = mock_api.check_status.call_args_list[0][0][0]
    second_key = mock_api.check_status.call_args_list[1][0][0]
    assert first_key.cluster == 'west'
    assert first_key.role == 'RoleA'
    assert first_key.env == 'test'
    assert first_key.name == 'hithere'
    assert second_key.cluster == 'west'
    assert second_key.role == 'bozo'
    assert second_key.env == 'test'
    assert second_key.name == 'hello'
def test_kill_job_with_instances_batched_maxerrors_output(self):
    """Batched kill that keeps failing should report each failed batch and
    then stop with a max-errors message; nothing goes to stdout."""
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('apache.aurora.client.cli.jobs.Job.create_context',
               return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        status_result = self.create_status_call_result()
        mock_context.add_expected_status_query_result(status_result)
        # Every kill attempt fails, driving the max-total-failures path.
        api.kill_job.return_value = self.create_error_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'kill', '--max-total-failures=1', '--config=%s' % fp.name,
                         'west/bozo/test/hello/0,2,4-13'])
            assert mock_context.get_out() == []
            print(mock_context.get_err())
            assert mock_context.get_err() == [
                'Kill of shards [0, 2, 4, 5, 6] failed with error; see log for details',
                'Kill of shards [7, 8, 9, 10, 11] failed with error; see log for details',
                'Exceeded maximum number of errors while killing instances']
def test_create_job_failed(self):
    """Run a test of the "create" command against a mocked-out API where the
    create RPC itself fails; the command must exit with EXIT_COMMAND_FAILURE."""
    mock_context = FakeAuroraCommandContext()
    with patch('apache.aurora.client.cli.jobs.Job.create_context',
               return_value=mock_context):
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.INIT))
        api = mock_context.get_api('west')
        api.create_job.return_value = self.get_failed_createjob_response()
        # Invoke create as if it had been called from the command line.
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cli = AuroraCommandLine()
            exit_code = cli.execute(['job', 'create', '--wait-until=RUNNING',
                                     'west/bozo/test/hello', fp.name])
            assert exit_code == EXIT_COMMAND_FAILURE
            # create_job was called exactly once with an AuroraConfig parameter.
            self.assert_create_job_called(api)
            # getTasksStatus was called once, before the create_job.
            assert api.scheduler_proxy.getTasksStatus.call_count == 1
def test_killall_job_output(self):
    """Test kill output: a successful `job killall --no-batching` prints one
    success line and nothing on stderr."""
    mock_context = FakeAuroraCommandContext()
    # NOTE(review): mock_scheduler_proxy is configured below but never attached
    # to the api — it appears vestigial; kept to preserve behavior exactly.
    mock_scheduler_proxy = Mock()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        mock_scheduler_proxy.getTasksWithoutConfigs.return_value = self.create_status_call_result()
        api.kill_job.return_value = self.get_kill_job_response()
        mock_scheduler_proxy.killTasks.return_value = self.get_kill_job_response()
        mock_context.add_expected_status_query_result(self.create_status_call_result(
            self.create_mock_task(ScheduleStatus.KILLED)))
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'killall', '--no-batching', '--config=%s' % fp.name,
                         'west/bozo/test/hello'])
            assert mock_context.get_out() == ['job killall succeeded']
            assert mock_context.get_err() == []
def test_kill_job_with_instances_batched_large(self):
    """Test kill client-side API logic: a large instance range is killed in
    batches of five, with the final batch being [12, 13].

    NOTE(review): another method with this exact name appears later in this
    file and shadows this one — confirm which definition should remain.
    """
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        status_result = self.create_status_call_result()
        mock_context.add_expected_status_query_result(status_result)
        api.kill_job.return_value = self.get_kill_job_response()
        mock_context.add_expected_status_query_result(self.create_status_call_result(
            self.create_mock_task(ScheduleStatus.KILLED)))
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'kill', '--config=%s' % fp.name,
                         'west/bozo/test/hello/0,2,4-13'])
            # 12 instances / batch size 5 => 3 kill_job calls.
            assert api.kill_job.call_count == 3
            api.kill_job.assert_called_with(
                AuroraJobKey.from_path('west/bozo/test/hello'), [12, 13])
            # Expect total 5 calls (3 from JobMonitor).
            self.assert_scheduler_called(api, self.get_expected_task_query([12, 13]), 5)
def test_kill_job_with_instances_batched_maxerrors(self):
    """Test kill client-side API logic: with --max-total-failures=1 and every
    batch failing, the command aborts after the second batch."""
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('apache.aurora.client.cli.jobs.Job.create_context',
               return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        status_result = self.create_status_call_result()
        mock_context.add_expected_status_query_result(status_result)
        api.kill_job.return_value = self.create_error_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'kill', '--max-total-failures=1', '--config=%s' % fp.name,
                         'west/bozo/test/hello/0,2,4-13'])
            # We should have aborted after the second batch.
            assert api.kill_job.call_count == 2
            # No successful kills means no follow-up status polling.
            assert api.scheduler_proxy.getTasksWithoutConfigs.call_count == 0
def test_kill_job_with_instances_batched_large(self):
    """Test kill client-side API logic: a large instance range is killed in
    batches, ending with batch [12, 13].

    NOTE(review): this duplicates the name of an earlier, near-identical test
    in this file and shadows it — confirm the duplicate is intentional.
    """
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        status_result = self.create_status_call_result()
        mock_context.add_expected_status_query_result(status_result)
        api.kill_job.return_value = self.get_kill_job_response()
        mock_context.add_expected_status_query_result(self.create_status_call_result(
            self.create_mock_task(ScheduleStatus.KILLED)))
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'kill', '--config=%s' % fp.name,
                         'west/bozo/test/hello/0,2,4-13'])
            # 12 instances / batch size 5 => 3 kill_job calls.
            assert api.kill_job.call_count == 3
            api.kill_job.assert_called_with(
                AuroraJobKey.from_path('west/bozo/test/hello'), [12, 13])
            # Expect total 5 calls (3 from JobMonitor).
            self.assert_scheduler_called(api, self.get_expected_task_query([12, 13]), 5)
def test_kill_job_with_invalid_instances_strict(self):
    """Test kill client-side API logic: in --strict mode, a spec naming
    instances that don't exist must abort before any kill RPC is sent."""
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        self.setup_get_tasks_status_calls(api.scheduler_proxy)
        api.kill_job.return_value = self.get_kill_job_response()
        mock_context.add_expected_status_query_result(self.create_status_call_result(
            self.create_mock_task(ScheduleStatus.KILLED)))
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            # Range 11-20 includes instances that do not exist for this job.
            cmd.execute(['job', 'kill', '--config=%s' % fp.name, '--no-batching',
                         '--strict', 'west/bozo/test/hello/0,2,4-6,11-20'])
            # Strict validation must prevent any kill from being issued.
            assert api.kill_job.call_count == 0
def test_killall_job_something_else(self):
    """Test kill client-side API logic: batched killall issues one kill per
    batch (4 total) and polls the scheduler for each."""
    mock_context = FakeAuroraCommandContext()
    # NOTE(review): mock_scheduler_proxy is configured but never attached to
    # the api — it appears vestigial; kept to preserve behavior exactly.
    mock_scheduler_proxy = Mock()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        api.kill_job.return_value = self.get_kill_job_response()
        mock_context.add_expected_status_query_result(self.create_status_call_result())
        mock_scheduler_proxy.killTasks.return_value = self.get_kill_job_response()
        mock_context.add_expected_status_query_result(self.create_status_call_result(
            self.create_mock_task(ScheduleStatus.KILLED)))
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'killall', '--config=%s' % fp.name, 'west/bozo/test/hello'])
            # 20 instances in batches of 5 => 4 kill_job calls.
            assert api.kill_job.call_count == 4
            instances = [15, 16, 17, 18, 19]
            api.kill_job.assert_called_with(
                AuroraJobKey.from_path('west/bozo/test/hello'), instances)
            self.assert_scheduler_called(api, self.get_expected_task_query(instances), 6)
def test_killall_job_wait_until_timeout(self):
    """Test kill client-side API logic: when tasks never leave RUNNING, the
    monitor exhausts its polls and killall exits with EXIT_TIMEOUT."""
    mock_context = FakeAuroraCommandContext()
    # NOTE(review): mock_scheduler_proxy is configured but never attached to
    # the api — it appears vestigial; kept to preserve behavior exactly.
    mock_scheduler_proxy = Mock()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        mock_scheduler_proxy.getTasksWithoutConfigs.return_value = self.create_status_call_result()
        api.kill_job.return_value = self.get_kill_job_response()
        mock_scheduler_proxy.killTasks.return_value = self.get_kill_job_response()
        # Tasks stay RUNNING for all 8 monitor polls, forcing a timeout.
        for _ in range(8):
            mock_context.add_expected_status_query_result(self.create_status_call_result(
                self.create_mock_task(ScheduleStatus.RUNNING)))
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            assert EXIT_TIMEOUT == cmd.execute(
                ['job', 'killall', '--no-batching', '--config=%s' % fp.name,
                 'west/bozo/test/hello'])
            assert api.kill_job.call_count == 1
            api.kill_job.assert_called_with(
                AuroraJobKey.from_path('west/bozo/test/hello'), None)
            self.assert_scheduler_called(api, self.get_expected_task_query(), 8)
def test_kill_job_with_instances_batched_output(self):
    """Test kill client-side API logic: a successful batched kill prints the
    killed-shards line and a final success line, nothing on stderr."""
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        status_result = self.create_status_call_result()
        mock_context.add_expected_status_query_result(status_result)
        api.kill_job.return_value = self.get_kill_job_response()
        mock_context.add_expected_status_query_result(self.create_status_call_result(
            self.create_mock_task(ScheduleStatus.KILLED)))
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'kill', '--config=%s' % fp.name,
                         'west/bozo/test/hello/0,2,4-6'])
            assert mock_context.get_out() == [
                'Successfully killed shards [0, 2, 4, 5, 6]',
                'job kill succeeded']
            assert mock_context.get_err() == []
def test_simple_successful_create_job_with_bindings(self):
    """Run a test of the "create" command against a mocked-out API:
    Verifies that the creation command with --bind values sends the right API
    RPCs, and performs the correct tests on the result.

    NOTE(review): another method with this exact name appears later in this
    file and shadows this one — confirm which definition should remain.
    """
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context):
        mock_query = self.create_mock_query()
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.PENDING))
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.RUNNING))
        api = mock_context.get_api('west')
        api.create_job.return_value = self.get_createjob_response()
        # Invoke create as if it had been called from the command line.
        with temporary_file() as fp:
            fp.write(self.get_unbound_test_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'create', '--wait-until=RUNNING',
                         '--bind', 'cluster_binding=west',
                         '--bind', 'instances_binding=20', '--bind', 'TEST_BATCH=1',
                         'west/bozo/test/hello', fp.name])
            # create_job was called exactly once with an AuroraConfig parameter.
            self.assert_create_job_called(api)
            self.assert_scheduler_called(api, mock_query, 2)
def test_simple_successful_create_job_with_bindings(self):
    """Run a test of the "create" command against a mocked-out API:
    Verifies that the creation command with --bind values sends the right API
    RPCs, and performs the correct tests on the result.

    NOTE(review): this duplicates the name of an earlier test in this file and
    shadows it (this variant enables reveal_errors and omits the TEST_BATCH
    binding) — confirm the duplicate is intentional.
    """
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context):
        mock_query = self.create_mock_query()
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.PENDING))
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.RUNNING))
        api = mock_context.get_api('west')
        api.create_job.return_value = self.get_createjob_response()
        mock_context.enable_reveal_errors()
        # Invoke create as if it had been called from the command line.
        with temporary_file() as fp:
            fp.write(self.get_unbound_test_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'create', '--wait-until=RUNNING',
                         '--bind', 'cluster_binding=west',
                         '--bind', 'instances_binding=20',
                         'west/bozo/test/hello', fp.name])
            # create_job was called exactly once with an AuroraConfig parameter.
            self.assert_create_job_called(api)
            self.assert_scheduler_called(api, mock_query, 2)
def test_simple_successful_create_job_output(self):
    """Run a test of the "create" command against a mocked-out API:
    Verifies that the creation command generates the correct output.
    """
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context):
        for status in (ScheduleStatus.PENDING, ScheduleStatus.RUNNING, ScheduleStatus.RUNNING):
            mock_context.add_expected_status_query_result(
                self.create_mock_status_query_result(status))
        api = mock_context.get_api('west')
        api.create_job.return_value = self.get_createjob_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            result = cmd.execute(['job', 'create', '--wait-until=RUNNING',
                                  'west/bozo/test/hello', fp.name])
            assert result == EXIT_OK
            assert mock_context.get_out() == [
                "job create succeeded: job url=http://something_or_other/scheduler/bozo/test/hello"]
            assert mock_context.get_err() == []
def test_empty_plugins_in_create_job(self):
    """Installs a plugin that doesn't implement any of the plugin methods.
    Prior to AURORA-362, this would cause the client to crash with an empty
    argument list.
    """
    mock_context = FakeAuroraCommandContext()
    with patch('apache.aurora.client.cli.jobs.Job.create_context',
               return_value=mock_context):
        mock_query = self.create_mock_query()
        for status in (ScheduleStatus.INIT, ScheduleStatus.RUNNING):
            mock_context.add_expected_status_query_result(
                self.create_mock_status_query_result(status))
        api = mock_context.get_api('west')
        api.create_job.return_value = self.get_createjob_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cli = AuroraCommandLine()
            cli.register_plugin(EmptyPlugin())
            cli.execute(['job', 'create', '--wait-until=RUNNING',
                         'west/bozo/test/hello', fp.name])
            self.assert_create_job_called(api)
            self.assert_scheduler_called(api, mock_query, 1)
def test_killall_job(self):
    """Test kill client-side API logic: unbatched killall issues exactly one
    kill_job RPC (instances=None) and polls the scheduler twice."""
    mock_context = FakeAuroraCommandContext()
    # NOTE(review): mock_scheduler_proxy is configured but never attached to
    # the api — it appears vestigial; kept to preserve behavior exactly.
    mock_scheduler_proxy = Mock()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        mock_scheduler_proxy.getTasksWithoutConfigs.return_value = self.create_status_call_result()
        api.kill_job.return_value = self.get_kill_job_response()
        mock_scheduler_proxy.killTasks.return_value = self.get_kill_job_response()
        mock_context.add_expected_status_query_result(self.create_status_call_result(
            self.create_mock_task(ScheduleStatus.KILLED)))
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'killall', '--no-batching', '--config=%s' % fp.name,
                         'west/bozo/test/hello'])
            assert api.kill_job.call_count == 1
            api.kill_job.assert_called_with(
                AuroraJobKey.from_path('west/bozo/test/hello'), None)
            self.assert_scheduler_called(api, self.get_expected_task_query(), 2)
def test_create_job_fail_and_write_log(self):
    """Check that when an unknown error occurs during command execution, the
    command-line framework catches it, and writes out an error log file
    containing the details of the error, including the command-line arguments
    passed to aurora to execute the command, and the stack trace of the error.
    """
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in
    # py3). time.time is pinned so the log file name is deterministic.
    with patch('time.time', return_value=23), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context):
        api = mock_context.get_api('west')
        api.create_job.side_effect = UnknownException()
        # Clean up the log directory in `finally` so a failing assertion does
        # not leak ./logged-errors into subsequent test runs.
        try:
            with temporary_file() as fp:
                fp.write(self.get_valid_config())
                fp.flush()
                cmd = AuroraCommandLine()
                result = cmd.execute(['job', 'create', '--wait-until=RUNNING',
                                      '--error-log-dir=./logged-errors',
                                      'west/bozo/test/hello', fp.name])
                assert result == EXIT_UNKNOWN_ERROR
                with open("./logged-errors/aurora-23.error-log", "r") as logfile:
                    error_log = logfile.read()
                    assert error_log.startswith("ERROR LOG: command arguments = %s" %
                                                ['job', 'create', '--wait-until=RUNNING',
                                                 '--error-log-dir=./logged-errors',
                                                 'west/bozo/test/hello', fp.name])
                    assert "Traceback" in error_log
        finally:
            if os.path.exists("./logged-errors"):
                shutil.rmtree("./logged-errors")
def test_kill_job_with_empty_instances_batched(self):
    """Test kill client-side API logic: if the status query reports no active
    tasks, no kill RPC should be issued at all."""
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('threading._Event.wait'), \
            patch('apache.aurora.client.cli.jobs.Job.create_context',
                  return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        api = mock_context.get_api('west')
        # Set up an empty instance list in the getTasksWithoutConfigs response.
        status_response = self.create_simple_success_response()
        schedule_status = Mock(spec=ScheduleStatusResult)
        status_response.result.scheduleStatusResult = schedule_status
        schedule_status.tasks = []
        mock_context.add_expected_status_query_result(status_response)
        api.kill_job.return_value = self.get_kill_job_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            cmd.execute(['job', 'kill', '--config=%s' % fp.name,
                         'west/bozo/test/hello/0,2,4-13'])
            # No live instances => nothing to kill.
            assert api.kill_job.call_count == 0
def test_start_update_command_line_succeeds(self):
    """`update start` should call start_job_update once with the parsed
    AuroraConfig (and no instance list) and print a success message."""
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch('apache.aurora.client.cli.update.Update.create_context',
               return_value=mock_context), \
            patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS):
        mock_api = mock_context.get_api('west')
        mock_api.start_job_update.return_value = self.create_simple_success_response()
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            result = cmd.execute(['update', 'start', self.TEST_JOBSPEC, fp.name])
            assert result == EXIT_OK
            assert mock_api.start_job_update.call_count == 1
            args, kwargs = mock_api.start_job_update.call_args
            assert isinstance(args[0], AuroraConfig)
            # No explicit instance list was passed on the command line.
            assert args[1] is None
            assert mock_context.get_out() == [
                "Scheduler-driven update of job west/bozo/test/hello has started."]
            assert mock_context.get_err() == []
def test_skip_hooks_in_create(self):
    """Run a test of create, with a hook that should forbid running create, but
    with a user who's covered by one of the hook skip exception rules - so it
    should succeed.

    NOTE(review): this duplicates the name of an earlier, near-identical test
    in this file and shadows it — confirm the duplicate is intentional.
    """
    GlobalCommandHookRegistry.reset()
    command_hook = HookForTesting(True)
    GlobalCommandHookRegistry.register_command_hook(command_hook)
    command_hook_two = SecondHookForTesting(False)
    GlobalCommandHookRegistry.register_command_hook(command_hook_two)
    mock_response = Mock()
    mock_context = FakeAuroraCommandContext()
    # Multi-manager `with` replaces deprecated contextlib.nested (removed in py3).
    with patch("apache.aurora.client.cli.jobs.Job.create_context",
               return_value=mock_context), \
            patch("requests.get", return_value=mock_response), \
            patch("getpass.getuser", return_value="bozo"):
        # Skip-rules payload: rule "a" lets user "bozo" skip the named hooks
        # for job killall/create.
        mock_response.json.return_value = {
            "a": {
                "users": ["bozo", "clown"],
                "commands": {"job": ["killall", "create"]},
                "hooks": ["test_hook", "second"]
            },
            "b": {
                "commands": {"user": ["kick"]},
            }
        }
        mock_query = self.create_mock_query()
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.INIT))
        mock_context.add_expected_status_query_result(
            self.create_mock_status_query_result(ScheduleStatus.RUNNING))
        api = mock_context.get_api("west")
        api.create_job.return_value = self.get_createjob_response()
        GlobalCommandHookRegistry.setup("http://foo.bar")
        with temporary_file() as fp:
            fp.write(self.get_valid_config())
            fp.flush()
            cmd = AuroraCommandLine()
            result = cmd.execute(["job", "create", "--skip-hooks=second",
                                  "--wait-until=RUNNING",
                                  "west/bozo/test/hello", fp.name])
            assert result == 0
            self.assert_create_job_called(api)
            self.assert_scheduler_called(api, mock_query, 1)
            assert command_hook.ran_pre
            assert command_hook.ran_post
def test_successful_status_shallow_nometadata(self):
  """Regression test: there was a crasher bug when metadata was None."""
  # NOTE(review): a method with this exact name is defined again later in this
  # class, so this earlier copy is shadowed at class-creation time and never
  # runs. One of the two should be removed or renamed.
  mock_context = FakeAuroraCommandContext()
  mock_api = mock_context.get_api('west')
  # check_status returns a status payload whose metadata field is None.
  mock_api.check_status.return_value = self.create_status_null_metadata()
  with contextlib.nested(
      patch('apache.aurora.client.cli.jobs.Job.create_context',
            return_value=mock_context)):
    cmd = AuroraCommandLine()
    cmd.execute(['job', 'status', 'west/bozo/test/hello'])
    # The CLI should resolve the job spec into a full AuroraJobKey.
    mock_api.check_status.assert_called_with(AuroraJobKey('west', 'bozo', 'test', 'hello'))
def test_simple_successful_cancel_update(self):
  """Run a test of the "kill" command against a mocked-out API:
  Verifies that the kill command sends the right API RPCs, and performs the
  correct tests on the result."""
  # NOTE(review): a method with this exact name is defined again later in this
  # class, so this earlier copy is shadowed and never runs. Also note this copy
  # invokes 'cancel_update' (underscore) while the later copy uses
  # 'cancel-update' (hyphen) — confirm which spelling the CLI registers.
  mock_context = FakeAuroraCommandContext()
  mock_api = mock_context.get_api('west')
  mock_api.cancel_update.return_value = self.create_simple_success_response()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=mock_context):
    cmd = AuroraCommandLine()
    cmd.execute(['job', 'cancel_update', 'west/bozo/test/hello'])
    self.assert_cancel_update_called(mock_api)
def test_successful_status_shallow(self):
  """Test the status command at the shallowest level: calling status should
  end up invoking the local APIs get_status method."""
  # NOTE(review): a method with this exact name is defined again later in this
  # class, so this earlier copy is shadowed and never runs. One of the two
  # should be removed or renamed.
  mock_context = FakeAuroraCommandContext()
  mock_api = mock_context.get_api('west')
  mock_api.check_status.return_value = self.create_status_response()
  with contextlib.nested(
      patch('apache.aurora.client.cli.jobs.Job.create_context',
            return_value=mock_context)):
    cmd = AuroraCommandLine()
    cmd.execute(['job', 'status', 'west/bozo/test/hello'])
    # The CLI should resolve the job spec into a full AuroraJobKey.
    mock_api.check_status.assert_called_with(AuroraJobKey('west', 'bozo', 'test', 'hello'))
def test_status_job_not_found(self):
  """Regression test: there was a crasher bug when metadata was None."""
  # NOTE(review): a method with this exact name is defined again later in this
  # class, so this earlier copy is shadowed and never runs. The two copies
  # expect DIFFERENT error messages ("Found no jobs matching ..." here vs
  # "No matching jobs found" later) — confirm which one matches the CLI.
  mock_context = FakeAuroraCommandContext()
  mock_api = mock_context.get_api('west')
  mock_api.check_status.return_value = self.create_empty_status()
  with contextlib.nested(
      patch('apache.aurora.client.cli.jobs.Job.create_context',
            return_value=mock_context)):
    cmd = AuroraCommandLine()
    result = cmd.execute(['job', 'status', 'west/bozo/test/hello'])
    assert result == EXIT_INVALID_PARAMETER
    assert mock_context.get_err() == ["Found no jobs matching west/bozo/test/hello"]
def test_unsuccessful_status_shallow(self): """Test the status command at the shallowest level: calling status should end up invoking the local APIs get_status method.""" # Calls api.check_status, which calls scheduler_proxy.getJobs mock_context = FakeAuroraCommandContext() mock_api = mock_context.get_api('west') mock_api.check_status.return_value = self.create_failed_status_response() with contextlib.nested( patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context)): cmd = AuroraCommandLine() result = cmd.execute(['job', 'status', 'west/bozo/test/hello']) assert result == EXIT_INVALID_PARAMETER
def test_no_jobs_found_status_shallow(self):
  """With --write-json, a status query matching no jobs emits a JSON error
  blob on stdout and still exits OK."""
  # Calls api.check_status, which calls scheduler_proxy.getJobs.
  context = FakeAuroraCommandContext()
  api = context.get_api('west')
  api.check_status.return_value = self.create_nojobs_status_response()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=context):
    exit_code = AuroraCommandLine().execute(
        ['job', 'status', '--write-json', 'west/bozo/test/hello'])
    assert context.get_out() == [
        '{"jobspec":"west/bozo/test/hello","error":"No matching jobs found"}']
    assert exit_code == EXIT_OK
def test_successful_status_shallow(self):
  """Shallow check of `job status`: the command should call the API's
  check_status with the fully-parsed job key."""
  context = FakeAuroraCommandContext()
  api = context.get_api('west')
  api.check_status.return_value = self.create_status_response()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=context):
    AuroraCommandLine().execute(['job', 'status', 'west/bozo/test/hello'])
    api.check_status.assert_called_with(
        AuroraJobKey('west', 'bozo', 'test', 'hello'))
def test_successful_status_shallow_nometadata(self):
  """Regression test: `job status` used to crash when the returned task
  metadata was None."""
  context = FakeAuroraCommandContext()
  api = context.get_api('west')
  api.check_status.return_value = self.create_status_null_metadata()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=context):
    AuroraCommandLine().execute(['job', 'status', 'west/bozo/test/hello'])
    api.check_status.assert_called_with(
        AuroraJobKey('west', 'bozo', 'test', 'hello'))
def test_status_job_not_found(self):
  """`job status` on a spec matching nothing exits with an invalid-parameter
  code and prints the not-found message to stderr."""
  context = FakeAuroraCommandContext()
  api = context.get_api('west')
  api.check_status.return_value = self.create_empty_status()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=context):
    exit_code = AuroraCommandLine().execute(
        ['job', 'status', 'west/bozo/test/hello'])
    assert exit_code == EXIT_INVALID_PARAMETER
    assert context.get_err() == ["No matching jobs found"]
def test_simple_successful_cancel_update(self):
  """`job cancel-update` against a mocked API should issue exactly the
  cancel_update RPC and succeed."""
  context = FakeAuroraCommandContext()
  api = context.get_api('west')
  api.cancel_update.return_value = self.create_simple_success_response()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=context):
    AuroraCommandLine().execute(['job', 'cancel-update', 'west/bozo/test/hello'])
    self.assert_cancel_update_called(api)
def test_update_invalid_config(self):
  # NOTE(review): a method with this exact name is defined again later in this
  # class, so this earlier copy is shadowed and never runs. One of the two
  # should be removed or renamed.
  #
  # An update driven by a config with a bogus field must fail validation
  # before any API call is made.
  mock_context = FakeAuroraCommandContext()
  with contextlib.nested(
      patch('apache.aurora.client.cli.jobs.Job.create_context',
            return_value=mock_context),
      patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
    mock_api = mock_context.get_api('west')
    with temporary_file() as fp:
      fp.write(self.get_invalid_config('invalid_field=False,'))
      fp.flush()
      cmd = AuroraCommandLine()
      result = cmd.execute(['job', 'update', '--force', self.TEST_JOBSPEC, fp.name])
      assert result == EXIT_INVALID_CONFIGURATION
      # The scheduler RPC was never attempted.
      assert mock_api.update_job.call_count == 0
def test_cannot_skip_hooks_in_create(self):
  """This time, the hook shouldn't be skippable, because we use a username
  who isn't allowed by the hook exception rule.
  """
  GlobalCommandHookRegistry.reset()
  # Hook one succeeds; hook two fails the command unless skipped.
  command_hook = HookForTesting(True)
  GlobalCommandHookRegistry.register_command_hook(command_hook)
  command_hook_two = SecondHookForTesting(False)
  GlobalCommandHookRegistry.register_command_hook(command_hook_two)
  mock_response = Mock()
  mock_context = FakeAuroraCommandContext()
  with contextlib.nested(
      patch("apache.aurora.client.cli.jobs.Job.create_context", return_value=mock_context),
      patch("requests.get", return_value=mock_response),
      # "beezus" is NOT in rule "a"'s users list, so skipping is denied.
      patch("getpass.getuser", return_value="beezus")):
    # Skip-rules document served to GlobalCommandHookRegistry.setup(): only
    # users bozo/clown may skip hooks "test_hook"/"second" for job commands.
    mock_response.json.return_value = {
        "a": {
            "users": ["bozo", "clown"],
            "commands": {"job": ["killall", "create"]},
            "hooks": ["test_hook", "second"]
        },
        "b": {
            "commands": {"user": ["kick"]},
        }
    }
    # Status query results for the create job monitor (unused if the hook
    # check aborts the command first).
    mock_context.add_expected_status_query_result(
        self.create_mock_status_query_result(ScheduleStatus.INIT))
    mock_context.add_expected_status_query_result(
        self.create_mock_status_query_result(ScheduleStatus.RUNNING))
    api = mock_context.get_api("west")
    api.create_job.return_value = self.get_createjob_response()
    GlobalCommandHookRegistry.setup("http://foo.bar")
    with temporary_file() as fp:
      fp.write(self.get_valid_config())
      fp.flush()
      cmd = AuroraCommandLine()
      result = cmd.execute([
          "job", "create", "--skip-hooks=second", "--wait-until=RUNNING",
          "west/bozo/test/hello", fp.name
      ])
      # Check that it returns the right error code, and that create_job didn't get called.
      assert result == EXIT_PERMISSION_VIOLATION
      assert api.create_job.call_count == 0
def test_schedule_cron_failed_invalid_config(self):
  # NOTE(review): a method with this exact name is defined again later in this
  # class, so this earlier copy is shadowed and never runs.
  #
  # A cron schedule driven by a syntactically-broken config must fail
  # validation before any API call is made.
  mock_context = FakeAuroraCommandContext()
  with patch("apache.aurora.client.cli.cron.CronNoun.create_context",
             return_value=mock_context):
    with temporary_file() as fp:
      fp.write(self.get_invalid_config("invalid_clause=oops"))
      fp.flush()
      cmd = AuroraCommandLine()
      result = cmd.execute(["cron", "schedule", "west/bozo/test/hello", fp.name])
      assert result == EXIT_INVALID_CONFIGURATION
      # Now check that the right API calls got made.
      # Check that create_job was not called.
      api = mock_context.get_api("west")
      assert api.schedule_cron.call_count == 0
def test_unsuccessful_status_shallow(self):
  """Shallow check of `job status` failure: a failed check_status response
  maps to the invalid-parameter exit code."""
  # check_status is what the command calls; it in turn queries the scheduler.
  context = FakeAuroraCommandContext()
  api = context.get_api('west')
  api.check_status.return_value = self.create_failed_status_response()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=context):
    exit_code = AuroraCommandLine().execute(
        ['job', 'status', 'west/bozo/test/hello'])
    assert exit_code == EXIT_INVALID_PARAMETER
def test_schedule_failed(self):
  # NOTE(review): a method with this exact name is defined again later in this
  # class, so this earlier copy is shadowed and never runs.
  #
  # When the scheduler rejects a cron schedule, the command exits with an
  # API-error code after exactly one RPC attempt.
  mock_context = FakeAuroraCommandContext()
  with patch("apache.aurora.client.cli.cron.CronNoun.create_context",
             return_value=mock_context):
    api = mock_context.get_api("west")
    api.schedule_cron.return_value = self.create_error_response()
    with temporary_file() as fp:
      fp.write(self.get_valid_config())
      fp.flush()
      cmd = AuroraCommandLine()
      result = cmd.execute(["cron", "schedule", "west/bozo/test/hello", fp.name])
      assert result == EXIT_API_ERROR
      # Now check that the right API calls got made.
      # Check that create_job was called exactly once, with an AuroraConfig parameter.
      assert api.schedule_cron.call_count == 1
def test_update_invalid_config(self):
  """A config containing an unknown field must abort `job update` with
  EXIT_INVALID_CONFIGURATION before any scheduler RPC."""
  context = FakeAuroraCommandContext()
  with contextlib.nested(
      patch('apache.aurora.client.cli.jobs.Job.create_context',
            return_value=context),
      patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
    api = context.get_api('west')
    with temporary_file() as config_file:
      config_file.write(self.get_invalid_config('invalid_field=False,'))
      config_file.flush()
      exit_code = AuroraCommandLine().execute(
          ['job', 'update', '--force', self.TEST_JOBSPEC, config_file.name])
      assert exit_code == EXIT_INVALID_CONFIGURATION
      # Validation failed locally, so update_job was never invoked.
      assert api.update_job.call_count == 0
def test_unknown_error(self):
  """If context creation blows up with an arbitrary exception, the command
  returns EXIT_UNKNOWN_ERROR and never reaches create_job."""
  context = FakeAuroraCommandContext(reveal=False)
  with contextlib.nested(
      patch('time.sleep'),
      # create_context itself raises, so the command body never executes.
      patch('apache.aurora.client.cli.jobs.Job.create_context',
            side_effect=Exception("Argh"))):
    api = context.get_api('west')
    api.create_job.return_value = self.get_createjob_response()
    with temporary_file() as config_file:
      config_file.write(self.get_valid_config())
      config_file.flush()
      exit_code = AuroraCommandLine().execute(
          ['job', 'create', '--wait-until=RUNNING', 'west/bozo/test/hello',
           config_file.name])
      assert exit_code == EXIT_UNKNOWN_ERROR
      assert api.create_job.call_count == 0
def test_schedule_cron_failed_invalid_config(self):
  """`cron schedule` with a syntactically-broken config must fail validation
  locally and never call schedule_cron."""
  context = FakeAuroraCommandContext()
  with patch('apache.aurora.client.cli.cron.CronNoun.create_context',
             return_value=context):
    with temporary_file() as config_file:
      config_file.write(self.get_invalid_config('invalid_clause=oops'))
      config_file.flush()
      exit_code = AuroraCommandLine().execute(
          ['cron', 'schedule', 'west/bozo/test/hello', config_file.name])
      assert exit_code == EXIT_INVALID_CONFIGURATION
      # The scheduler RPC must never have been attempted.
      assert context.get_api('west').schedule_cron.call_count == 0
def test_list_updates_command(self):
  # `beta-update list west --user=me` should render one two-line entry per
  # update returned by query_job_updates.
  mock_context = FakeAuroraCommandContext()
  mock_context.get_api('west').query_job_updates.return_value = self.get_status_query_response()
  with contextlib.nested(
      patch('apache.aurora.client.cli.update.Update.create_context',
            return_value=mock_context),
      patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
    cmd = AuroraCommandLine()
    result = cmd.execute(["beta-update", "list", "west", "--user=me"])
    assert result == EXIT_OK
    # NOTE(review): the interior whitespace of this expected literal was lost
    # in a formatting pass; the "Created:" lines are presumed indented relative
    # to the "Job:" lines — verify against the command's actual output.
    assert mock_context.get_out_str() == textwrap.dedent("""\
        Job: west/mcc/test/hello, Id: hello, User: me, Status: ROLLING_FORWARD
          Created: 1411404927, Last Modified 14114056030
        Job: west/mch/prod/goodbye, Id: goodbye, User: me, Status: ROLLING_BACK
          Created: 1411300632, Last Modified 14114092632
        Job: west/mcq/devel/gasp, Id: gasp, User: me, Status: ROLL_FORWARD_PAUSED
          Created: 1411600891, Last Modified 1411800891""")
def test_status_wildcard_two(self):
  """Test status using a wildcard. It should first call api.get_jobs, and
  then do a getTasksStatus on each job. This time, use a pattern that
  doesn't match all of the jobs."""
  # NOTE(review): a method with this exact name is defined again later in this
  # class, so this earlier copy is shadowed and never runs. (The later copy's
  # docstring says getTasksWithoutConfigs rather than getTasksStatus.)
  mock_context = FakeAuroraCommandContext()
  mock_api = mock_context.get_api('west')
  mock_api.check_status.return_value = self.create_status_response()
  mock_api.get_jobs.return_value = self.create_getjobs_response()
  with contextlib.nested(
      patch('apache.aurora.client.cli.jobs.Job.create_context',
            return_value=mock_context)):
    cmd = AuroraCommandLine()
    cmd.execute(['job', 'status', 'example/*/*/hello'])
    # Wildcard should have expanded to two jobs, but only matched one,
    # so there should be one call to check_status.
    assert mock_api.check_status.call_count == 1
    mock_api.check_status.assert_called_with(
        AuroraJobKey('example', 'bozo', 'test', 'hello'))
def test_update_command_line_succeeds(self):
  """A forced `job update` with a valid config calls update_job once with
  (AuroraConfig, health-check interval, no instance list)."""
  context = FakeAuroraCommandContext()
  with contextlib.nested(
      patch('apache.aurora.client.cli.jobs.Job.create_context',
            return_value=context),
      patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
    api = context.get_api('west')
    with temporary_file() as config_file:
      config_file.write(self.get_valid_config())
      config_file.flush()
      AuroraCommandLine().execute(
          ['job', 'update', '--force', self.TEST_JOBSPEC, config_file.name])
      assert api.update_job.call_count == 1
      call_args, _ = api.update_job.call_args
      assert isinstance(call_args[0], AuroraConfig)
      assert call_args[1] == 3
      assert call_args[2] is None
def test_schedule_failed(self):
  """A scheduler-side error during `cron schedule` surfaces as EXIT_API_ERROR
  after exactly one schedule_cron RPC."""
  context = FakeAuroraCommandContext()
  with patch('apache.aurora.client.cli.cron.CronNoun.create_context',
             return_value=context):
    api = context.get_api('west')
    api.schedule_cron.return_value = self.create_error_response()
    with temporary_file() as config_file:
      config_file.write(self.get_valid_config())
      config_file.flush()
      exit_code = AuroraCommandLine().execute(
          ['cron', 'schedule', 'west/bozo/test/hello', config_file.name])
      assert exit_code == EXIT_API_ERROR
      # The RPC was attempted once; the error came from the scheduler side.
      assert api.schedule_cron.call_count == 1
def test_interrupt(self):
  """A KeyboardInterrupt raised during context creation maps to
  EXIT_INTERRUPTED and create_job is never reached."""
  context = FakeAuroraCommandContext()
  with contextlib.nested(
      patch('time.sleep'),
      # Interrupt fires before the command body can run.
      patch('apache.aurora.client.cli.jobs.Job.create_context',
            side_effect=KeyboardInterrupt())):
    api = context.get_api('west')
    api.create_job.return_value = self.get_createjob_response()
    with temporary_file() as config_file:
      config_file.write(self.get_valid_config())
      config_file.flush()
      exit_code = AuroraCommandLine().execute(
          ['job', 'create', '--wait-until=RUNNING', 'west/bozo/test/hello',
           config_file.name])
      assert exit_code == EXIT_INTERRUPTED
      assert api.create_job.call_count == 0
def test_resume_update_command_line_succeeds(self):
  """`beta-update resume` should call resume_job_update with the job key and
  report the resumption on stdout."""
  context = FakeAuroraCommandContext()
  with contextlib.nested(
      patch('apache.aurora.client.cli.update.Update.create_context',
            return_value=context),
      patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
    api = context.get_api('west')
    api.resume_job_update.return_value = self.create_simple_success_response()
    with temporary_file() as config_file:
      config_file.write(self.get_valid_config())
      config_file.flush()
      exit_code = AuroraCommandLine().execute(
          ['beta-update', 'resume', self.TEST_JOBSPEC])
      assert exit_code == EXIT_OK
      api.resume_job_update.assert_called_with(self.TEST_JOBKEY)
      assert context.get_out() == [
          "Scheduler-driven update of job west/bozo/test/hello has been resumed."]
def test_create_job_failed_invalid_config(self):
  """`job create` with a configuration containing a syntax error must fail
  validation locally without touching the API or the scheduler proxy."""
  context = FakeAuroraCommandContext()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=context):
    with temporary_file() as config_file:
      config_file.write(self.get_invalid_config('invalid_clause=oops'))
      config_file.flush()
      exit_code = AuroraCommandLine().execute(
          ['job', 'create', '--wait-until=RUNNING', 'west/bozo/test/hello',
           config_file.name])
      assert exit_code == EXIT_INVALID_CONFIGURATION
      # No RPC of any kind was issued.
      api = context.get_api('west')
      assert api.create_job.call_count == 0
      assert api.scheduler_proxy.test_simple_successful_create_job.call_count == 0
def test_status_wildcard_two(self):
  """Wildcard status: get_jobs expands the pattern, then check_status runs
  only on the jobs that actually match. This pattern matches just one of
  the two expanded jobs."""
  context = FakeAuroraCommandContext()
  api = context.get_api('west')
  api.check_status.return_value = self.create_status_response()
  api.get_jobs.return_value = self.create_getjobs_response()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=context):
    AuroraCommandLine().execute(['job', 'status', 'example/*/*/hello'])
    # Two jobs expanded, one matched => exactly one check_status call.
    assert api.check_status.call_count == 1
    api.check_status.assert_called_with(
        AuroraJobKey('example', 'bozo', 'test', 'hello'))
def test_command_invocation_logging(self):
  """Sets up a log handler, registers it with the logger, and then verifies
  that calls to the client logging methods correctly get captured in the logs.
  """
  mock_log_handler = MockHandler()
  logger = logging.getLogger('aurora_client')
  logger.setLevel(logging.DEBUG)
  logger.addHandler(mock_log_handler)
  # We'll patch out create_context, which will give us a fake context
  # object, and everything can be stubbed through that.
  mock_context = FakeAuroraCommandContext()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=mock_context):
    # After making the client, create sets up a job monitor.
    # The monitor uses TaskQuery to get the tasks. It's called at least twice: once before
    # the job is created, and once after. So we need to set up mocks for the query results.
    mock_context.add_expected_status_query_result(
        self.create_mock_status_query_result(ScheduleStatus.INIT))
    mock_context.add_expected_status_query_result(
        self.create_mock_status_query_result(ScheduleStatus.RUNNING))
    api = mock_context.get_api('west')
    api.create_job.return_value = self.get_createjob_response()
    with temporary_file() as fp:
      fp.write(self.get_valid_config())
      fp.flush()
      cmd = AuroraCommandLine()
      cmd.execute([
          'job', 'create', '--wait-until=RUNNING', 'west/bozo/test/hello',
          fp.name
      ])
      # Check that things were logged correctly:
      # there should be at least two entries, with the clientid and username;
      # and one entry should log the command being invoked.
      assert any((
          "'job', 'create', '--wait-until=RUNNING', 'west/bozo/test/hello'"
          in r.getMessage()) for r in mock_log_handler.logs)
      # All records from one invocation share the same client id and user.
      assert mock_log_handler.logs[0].clientid == mock_log_handler.logs[1].clientid
      assert mock_log_handler.logs[0].user == mock_log_handler.logs[1].user
def test_create_job_failed_output(self):
  """A failed create exits with EXIT_COMMAND_FAILURE, prints nothing on
  stdout, and reports the scheduler error on stderr."""
  context = FakeAuroraCommandContext()
  with patch('apache.aurora.client.cli.jobs.Job.create_context',
             return_value=context):
    context.add_expected_status_query_result(
        self.create_mock_status_query_result(ScheduleStatus.INIT))
    api = context.get_api('west')
    api.create_job.return_value = self.get_failed_createjob_response()
    with temporary_file() as config_file:
      config_file.write(self.get_valid_config())
      config_file.flush()
      exit_code = AuroraCommandLine().execute(
          ['job', 'create', '--wait-until=RUNNING', 'west/bozo/test/hello',
           config_file.name])
      assert exit_code == EXIT_COMMAND_FAILURE
      # Dump the streams to aid debugging on failure, then pin their contents.
      print("Out=%s\nErr=%s" % (context.get_out(), context.get_err()))
      assert context.get_out() == []
      assert context.get_err() == ["job create failed because of scheduler error"]