def test_update_status(self):
    """`beta-update status` exits OK and renders the update, its status
    transitions, and per-instance events, then queries by job key."""
    mock_context = FakeAuroraCommandContext()
    api = mock_context.get_api('west')
    api.query_job_updates.return_value = self.get_status_query_response()
    api.get_job_update_details.return_value = self.get_update_details_response()

    with contextlib.nested(
        patch('apache.aurora.client.cli.update.Update.create_context', return_value=mock_context),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
      cmd = AuroraCommandLine()
      result = cmd.execute(["beta-update", "status", "west/mcc/test/hello"])
      assert result == EXIT_OK
      # Timestamps are faked by the context, so the expected strings are stable.
      assert mock_context.get_out() == [
          "Job: west/mcc/test/hello, UpdateID: fake-update-identifier",
          "Started YYYY-MM-DD HH:MM:SS, last updated: YYYY-MM-DD HH:MM:SS",
          "Current status: ROLLING_FORWARD",
          "Update events:",
          "  Status: ROLLING_FORWARD at YYYY-MM-DD HH:MM:SS",
          "  Status: ROLL_FORWARD_PAUSED at YYYY-MM-DD HH:MM:SS",
          "  Status: ROLLING_FORWARD at YYYY-MM-DD HH:MM:SS",
          "Instance events:",
          "  Instance 1 at YYYY-MM-DD HH:MM:SS: INSTANCE_UPDATING",
          "  Instance 2 at YYYY-MM-DD HH:MM:SS: INSTANCE_UPDATING",
          "  Instance 1 at YYYY-MM-DD HH:MM:SS: INSTANCE_UPDATED",
          "  Instance 2 at YYYY-MM-DD HH:MM:SS: INSTANCE_UPDATED"]
      # The status command must query updates for the exact job key requested.
      mock_context.get_api("west").query_job_updates.assert_called_with(jobKey=AuroraJobKey(
          'west', 'mcc', 'test', 'hello'))
# --- Example 2 ---
  def test_simple_successful_create_job_output(self):
    """Run a test of the "create" command against a mocked-out API:
    Verifies that the creation command generates the correct output.
    """
    fake_context = FakeAuroraCommandContext()
    with contextlib.nested(
        patch('threading._Event.wait'),
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=fake_context)):
      # Queue the status progression the client will poll through.
      for status in (ScheduleStatus.PENDING, ScheduleStatus.RUNNING, ScheduleStatus.RUNNING):
        fake_context.add_expected_status_query_result(
          self.create_mock_status_query_result(status))
      fake_context.get_api('west').create_job.return_value = self.get_createjob_response()

      with temporary_file() as config_file:
        config_file.write(self.get_valid_config())
        config_file.flush()
        exit_code = AuroraCommandLine().execute(
            ['job', 'create', '--wait-until=RUNNING', 'west/bozo/test/hello',
            config_file.name])
        assert exit_code == EXIT_OK
      assert fake_context.get_out() == [
          "job create succeeded: job url=http://something_or_other/scheduler/bozo/test/hello"]
      assert fake_context.get_err() == []
# --- Example 3 ---
    def test_start_update_command_line_succeeds(self):
        """`update start` with a valid config exits OK and reports success."""
        fake_context = FakeAuroraCommandContext()
        with contextlib.nested(
                patch('apache.aurora.client.cli.update.Update.create_context',
                      return_value=fake_context),
                patch('apache.aurora.client.factory.CLUSTERS',
                      new=self.TEST_CLUSTERS)):
            update_api = fake_context.get_api('west')
            update_api.start_job_update.return_value = (
                self.create_simple_success_response())
            with temporary_file() as config_file:
                config_file.write(self.get_valid_config())
                config_file.flush()
                exit_code = AuroraCommandLine().execute(
                    ['update', 'start', self.TEST_JOBSPEC, config_file.name])
                assert exit_code == EXIT_OK

            # Exactly one update started, from a parsed config, no instance spec.
            assert update_api.start_job_update.call_count == 1
            positional, _ = update_api.start_job_update.call_args
            assert isinstance(positional[0], AuroraConfig)
            assert positional[1] is None
            assert fake_context.get_out() == [
                "Scheduler-driven update of job west/bozo/test/hello has started."
            ]
            assert fake_context.get_err() == []
# --- Example 4 ---
    def test_killall_job_output(self):
        """`job killall --no-batching` reports success with no errors.

        The original test built a local Mock() scheduler proxy and configured
        return values on it, but never injected it anywhere — that dead setup
        is removed here.
        """
        mock_context = FakeAuroraCommandContext()
        with contextlib.nested(
                patch('threading._Event.wait'),
                patch('apache.aurora.client.cli.jobs.Job.create_context',
                      return_value=mock_context),
                patch('apache.aurora.client.factory.CLUSTERS',
                      new=self.TEST_CLUSTERS)):

            api = mock_context.get_api('west')
            api.kill_job.return_value = self.get_kill_job_response()
            # After the kill, a status query shows the tasks as KILLED.
            mock_context.add_expected_status_query_result(
                self.create_status_call_result(
                    self.create_mock_task(ScheduleStatus.KILLED)))
            with temporary_file() as fp:
                fp.write(self.get_valid_config())
                fp.flush()
                cmd = AuroraCommandLine()
                cmd.execute([
                    'job', 'killall', '--no-batching',
                    '--config=%s' % fp.name, 'west/bozo/test/hello'
                ])
            assert mock_context.get_out() == ['job killall succeeded']
            assert mock_context.get_err() == []
# --- Example 5 ---
    def test_kill_job_with_instances_batched_output(self):
        """Batched kill of an explicit instance spec reports each killed shard."""
        fake_context = FakeAuroraCommandContext()
        with contextlib.nested(
                patch('threading._Event.wait'),
                patch('apache.aurora.client.cli.jobs.Job.create_context',
                      return_value=fake_context),
                patch('apache.aurora.client.factory.CLUSTERS',
                      new=self.TEST_CLUSTERS)):
            api = fake_context.get_api('west')
            fake_context.add_expected_status_query_result(
                self.create_status_call_result())
            api.kill_job.return_value = self.get_kill_job_response()
            # The follow-up status query reflects the tasks having been killed.
            fake_context.add_expected_status_query_result(
                self.create_status_call_result(
                    self.create_mock_task(ScheduleStatus.KILLED)))

            with temporary_file() as config_file:
                config_file.write(self.get_valid_config())
                config_file.flush()
                AuroraCommandLine().execute([
                    'job', 'kill',
                    '--config=%s' % config_file.name, 'west/bozo/test/hello/0,2,4-6'
                ])

        assert fake_context.get_out() == [
            'Successfully killed shards [0, 2, 4, 5, 6]', 'job kill succeeded'
        ]
        assert fake_context.get_err() == []
# --- Example 6 ---
    def test_kill_job_with_instances_batched_maxerrors_output(self):
        """Batched kill that keeps failing aborts once --max-total-failures is
        exceeded; each failed batch is reported on stderr.
        """
        mock_context = FakeAuroraCommandContext()
        with contextlib.nested(
                patch('apache.aurora.client.cli.jobs.Job.create_context',
                      return_value=mock_context),
                patch('apache.aurora.client.factory.CLUSTERS',
                      new=self.TEST_CLUSTERS)):
            api = mock_context.get_api('west')
            status_result = self.create_status_call_result()
            mock_context.add_expected_status_query_result(status_result)
            # Every kill attempt fails, so batching should stop at the error cap.
            api.kill_job.return_value = self.create_error_response()

            with temporary_file() as fp:
                fp.write(self.get_valid_config())
                fp.flush()
                cmd = AuroraCommandLine()
                cmd.execute([
                    'job', 'kill', '--max-total-failures=1',
                    '--config=%s' % fp.name, 'west/bozo/test/hello/0,2,4-13'
                ])

            assert mock_context.get_out() == []
            assert mock_context.get_err() == [
                'Kill of shards [0, 2, 4, 5, 6] failed with error; see log for details',
                'Kill of shards [7, 8, 9, 10, 11] failed with error; see log for details',
                'Exceeded maximum number of errors while killing instances'
            ]
# --- Example 7 ---
 def test_successful_status_output_no_metadata(self):
   """Test the status command more deeply: in a request with a fully specified
   job, it should end up doing a query using getTasksWithoutConfigs."""
   mock_context = FakeAuroraCommandContext()
   mock_context.add_expected_status_query_result(self.create_status_null_metadata())
   with contextlib.nested(
       patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context),
       patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
     cmd = AuroraCommandLine()
     cmd.execute(['job', 'status', 'west/bozo/test/hello'])
     # Scrub wall-clock times (HH:MM:SS) so the comparison is deterministic.
     actual = re.sub("\\d\\d:\\d\\d:\\d\\d", "##:##:##", '\n'.join(mock_context.get_out()))
     expected = textwrap.dedent("""\
         Active tasks (3):
         \tTask role: bozo, env: test, name: woops, instance: 1, status: RUNNING on slavehost
         \t  cpus: 2, ram: 2 MB, disk: 2 MB
         \t  events:
         \t   1970-11-23 ##:##:## RUNNING: Hi there
         \tTask role: bozo, env: test, name: woops, instance: 2, status: RUNNING on slavehost
         \t  cpus: 2, ram: 2 MB, disk: 2 MB
         \t  events:
         \t   1970-11-23 ##:##:## RUNNING: Hi there
         \tTask role: bozo, env: test, name: woops, instance: 3, status: RUNNING on slavehost
         \t  cpus: 2, ram: 2 MB, disk: 2 MB
         \t  events:
         \t   1970-11-23 ##:##:## RUNNING: Hi there
         Inactive tasks (0):
         """)
     assert actual == expected
# --- Example 8 ---
  def test_create_job_startup_fails(self):
    """Creation is accepted but status checks report errors: expect a failure
    exit and an error message on stderr."""
    fake_context = FakeAuroraCommandContext()
    with contextlib.nested(
        patch('threading._Event.wait'),
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=fake_context)):
      for status in (ScheduleStatus.PENDING, ScheduleStatus.RUNNING):
        fake_context.add_expected_status_query_result(
          self.create_mock_status_query_result(status))

      # We need to override the side_effect behavior of check_status in the context.
      fake_context.get_api("west").check_status.side_effect = (
          lambda *args: self.create_error_response())

      api = fake_context.get_api('west')
      api.create_job.return_value = self.get_createjob_response()

      with temporary_file() as config_file:
        config_file.write(self.get_valid_config())
        config_file.flush()
        result = AuroraCommandLine().execute(
            ['job', 'create', '--wait-until=RUNNING', 'west/bozo/test/hello',
            config_file.name])
        assert result == EXIT_COMMAND_FAILURE
      assert fake_context.get_out() == []
      assert fake_context.get_err() == ["Error occurred while creating job west/bozo/test/hello"]
# --- Example 9 ---
 def test_successful_status_output_no_metadata(self):
     """Test the status command more deeply: in a request with a fully specified
 job, it should end up doing a query using getTasksWithoutConfigs."""
     mock_context = FakeAuroraCommandContext()
     mock_context.add_expected_status_query_result(
         self.create_status_null_metadata())
     with contextlib.nested(
             patch('apache.aurora.client.cli.jobs.Job.create_context',
                   return_value=mock_context),
             patch('apache.aurora.client.factory.CLUSTERS',
                   new=self.TEST_CLUSTERS)):
         cmd = AuroraCommandLine()
         cmd.execute(['job', 'status', 'west/bozo/test/hello'])
         # Scrub wall-clock times (HH:MM:SS) so the comparison is deterministic.
         actual = re.sub("\\d\\d:\\d\\d:\\d\\d", "##:##:##",
                         '\n'.join(mock_context.get_out()))
         expected = textwrap.dedent("""\
       Active tasks (3):
       \tTask:
       \t  cpus: 2, ram: 2 MB, disk: 2 MB
       \t  events:
       \t   1970-11-23 ##:##:## RUNNING: Hi there
       \tTask:
       \t  cpus: 2, ram: 2 MB, disk: 2 MB
       \t  events:
       \t   1970-11-23 ##:##:## RUNNING: Hi there
       \tTask:
       \t  cpus: 2, ram: 2 MB, disk: 2 MB
       \t  events:
       \t   1970-11-23 ##:##:## RUNNING: Hi there
       Inactive tasks (0):
       """)
         assert actual == expected
# --- Example 10 ---
 def test_get_quota_with_consumed_json(self):
   """Quota JSON output for `quota get --write_json`.

   NOTE(review): the test name says "with_consumed" but the setup helper and
   the expected JSON cover only the allocated quota -- confirm the intent.
   """
   mock_context = FakeAuroraCommandContext()
   self.setup_mock_quota_call_no_consumed(mock_context)
   with contextlib.nested(
       patch('apache.aurora.client.cli.quota.Quota.create_context', return_value=mock_context),
       patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
     cmd = AuroraCommandLine()
     cmd.execute(['quota', 'get', '--write_json', 'west/bozo'])
     out = '\n'.join(mock_context.get_out())
     assert out == '{"quota":{"numCpus":5,"ramMb":20480,"diskMb":40960}}'
# --- Example 11 ---
 def test_get_quota_no_consumed(self):
   """Plain-text quota output shows only the allocated resources."""
   fake_context = FakeAuroraCommandContext()
   self.setup_mock_quota_call_no_consumed(fake_context)
   with contextlib.nested(
       patch('apache.aurora.client.cli.quota.Quota.create_context', return_value=fake_context),
       patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
     AuroraCommandLine().execute(['quota', 'get', 'west/bozo'])
     report = '\n'.join(fake_context.get_out())
     assert report == "Allocated:\n  CPU: 5\n  RAM: 20.000000 GB\n  Disk: 40.000000 GB"
# --- Example 12 ---
 def test_get_job_uptime_with_percentiles(self):
   """`sla get-job-uptime` echoes the uptime for each requested percentile."""
   fake_context = FakeAuroraCommandContext()
   self.setup_mock_sla_uptime_vector(fake_context, 915)
   with contextlib.nested(
       patch('apache.aurora.client.cli.sla.Sla.create_context', return_value=fake_context),
       patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
     AuroraCommandLine().execute(
         ['sla', 'get-job-uptime', 'west/role/env/test', '--percentiles=99.9,85.5'])
     report = '\n'.join(fake_context.get_out())
     for fragment in ('99.9 percentile\t- 915 seconds', '85.5 percentile\t- 915 seconds'):
       assert fragment in report
# --- Example 13 ---
  def test_create_job_failed_output(self):
    """Test that a failed create generates the correct error messages"""
    mock_context = FakeAuroraCommandContext()
    with patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context):
      mock_context.add_expected_status_query_result(
          self.create_mock_status_query_result(ScheduleStatus.INIT))
      api = mock_context.get_api('west')
      api.create_job.return_value = self.get_failed_createjob_response()
      with temporary_file() as fp:
        fp.write(self.get_valid_config())
        fp.flush()
        cmd = AuroraCommandLine()
        result = cmd.execute(['job', 'create', '--wait-until=RUNNING',
            'west/bozo/test/hello', fp.name])
        assert result == EXIT_COMMAND_FAILURE

      # The scheduler error surfaces on stderr; nothing is written to stdout.
      assert mock_context.get_out() == []
      assert mock_context.get_err() == ["job create failed because of scheduler error"]
# --- Example 14 ---
  def test_create_job_failed_output(self):
    """Test that a failed create generates the correct error messages"""
    mock_context = FakeAuroraCommandContext()
    with patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context):
      mock_context.add_expected_status_query_result(
          self.create_mock_status_query_result(ScheduleStatus.INIT))
      api = mock_context.get_api('west')
      api.create_job.return_value = self.get_failed_createjob_response()
      with temporary_file() as fp:
        fp.write(self.get_valid_config())
        fp.flush()
        cmd = AuroraCommandLine()
        result = cmd.execute(['job', 'create', '--wait-until=RUNNING',
            'west/bozo/test/hello', fp.name])
        assert result == EXIT_COMMAND_FAILURE

      # The scheduler error surfaces on stderr; nothing is written to stdout.
      assert mock_context.get_out() == []
      assert mock_context.get_err() == ["job create failed because of scheduler error"]
# --- Example 15 ---
 def test_no_jobs_found_status_shallow(self):
   """`job status --write-json` with no matching jobs emits a JSON error blob."""
   # Calls api.check_status, which calls scheduler_proxy.getJobs
   mock_context = FakeAuroraCommandContext()
   mock_api = mock_context.get_api('west')
   mock_api.check_status.return_value = self.create_nojobs_status_response()
   # A single context manager does not need contextlib.nested.
   with patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context):
     cmd = AuroraCommandLine()
     result = cmd.execute(['job', 'status', '--write-json', 'west/bozo/test/hello'])
     assert mock_context.get_out() == [
       '{"jobspec":"west/bozo/test/hello","error":"No matching jobs found"}']
     assert result == EXIT_OK
# --- Example 16 ---
 def test_get_task_up_count_with_durations(self):
   """`sla get-task-up-count` reports uptime for each requested duration."""
   fake_context = FakeAuroraCommandContext()
   self.setup_mock_sla_uptime_vector(fake_context, 95.3577434734)
   with contextlib.nested(
       patch('apache.aurora.client.cli.sla.Sla.create_context', return_value=fake_context),
       patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
     AuroraCommandLine().execute(
         ['sla', 'get-task-up-count', 'west/role/env/test', '--durations=3m,2d6h,3h'])
     report = '\n'.join(fake_context.get_out())
     for fragment in ('3 mins\t- 95.36 %', '54 hrs\t- 95.36 %', '3 hrs\t- 95.36 %'):
       assert fragment in report
# --- Example 17 ---
  def _get_quota(self, include_consumption, command_args):
    """Run a quota command against a mocked context; return its joined stdout."""
    fake_context = FakeAuroraCommandContext()
    setup = (self.setup_mock_quota_call_with_consumption if include_consumption
             else self.setup_mock_quota_call_no_consumption)
    setup(fake_context)

    with contextlib.nested(
        patch('apache.aurora.client.cli.quota.Quota.create_context', return_value=fake_context),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
      AuroraCommandLine().execute(command_args)
      return '\n'.join(fake_context.get_out())
# --- Example 18 ---
 def test_get_task_up_count_no_duration(self):
   """Without --durations, the default duration buckets are reported."""
   fake_context = FakeAuroraCommandContext()
   self.setup_mock_sla_uptime_vector(fake_context, 10.6533333333)
   with contextlib.nested(
       patch('apache.aurora.client.cli.sla.Sla.create_context', return_value=fake_context),
       patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
     AuroraCommandLine().execute(['sla', 'get-task-up-count', 'west/role/env/test'])
     report = '\n'.join(fake_context.get_out())
     for fragment in ('1 mins\t- 10.65 %\n', '10 mins\t- 10.65 %\n', '1 hrs\t- 10.65 %\n',
                      '12 hrs\t- 10.65 %\n', '7 days\t- 10.65 %'):
       assert fragment in report
# --- Example 19 ---
    def _get_quota(self, include_consumption, command_args):
        """Run a quota command against a mocked context; return its joined stdout."""
        fake_context = FakeAuroraCommandContext()
        setup = (self.setup_mock_quota_call_with_consumption
                 if include_consumption
                 else self.setup_mock_quota_call_no_consumption)
        setup(fake_context)

        with contextlib.nested(
                patch('apache.aurora.client.cli.quota.Quota.create_context',
                      return_value=fake_context),
                patch('apache.aurora.client.factory.CLUSTERS',
                      new=self.TEST_CLUSTERS)):
            AuroraCommandLine().execute(command_args)
            return '\n'.join(fake_context.get_out())
# --- Example 20 ---
 def test_get_task_up_count_no_duration(self):
     """Without --durations, the default duration buckets are reported."""
     fake_context = FakeAuroraCommandContext()
     self.setup_mock_sla_uptime_vector(fake_context, 10.6533333333)
     with contextlib.nested(
             patch('apache.aurora.client.cli.sla.Sla.create_context',
                   return_value=fake_context),
             patch('apache.aurora.client.factory.CLUSTERS',
                   new=self.TEST_CLUSTERS)):
         AuroraCommandLine().execute(['sla', 'get-task-up-count', 'west/role/env/test'])
         report = '\n'.join(fake_context.get_out())
         for fragment in ('1 mins\t- 10.65 %\n', '10 mins\t- 10.65 %\n',
                          '1 hrs\t- 10.65 %\n', '12 hrs\t- 10.65 %\n',
                          '7 days\t- 10.65 %'):
             assert fragment in report
# --- Example 21 ---
 def test_get_job_uptime_with_percentiles(self):
     """`sla get-job-uptime` echoes the uptime for each requested percentile."""
     fake_context = FakeAuroraCommandContext()
     self.setup_mock_sla_uptime_vector(fake_context, 915)
     with contextlib.nested(
             patch('apache.aurora.client.cli.sla.Sla.create_context',
                   return_value=fake_context),
             patch('apache.aurora.client.factory.CLUSTERS',
                   new=self.TEST_CLUSTERS)):
         AuroraCommandLine().execute([
             'sla', 'get-job-uptime', 'west/role/env/test',
             '--percentiles=99.9,85.5'
         ])
         report = '\n'.join(fake_context.get_out())
         for fragment in ('99.9 percentile\t- 915 seconds',
                          '85.5 percentile\t- 915 seconds'):
             assert fragment in report
# --- Example 22 ---
 def test_list_updates_command(self):
   """`beta-update list` renders one two-line stanza per in-flight update."""
   mock_context = FakeAuroraCommandContext()
   mock_context.get_api('west').query_job_updates.return_value = self.get_status_query_response()
   with contextlib.nested(
       patch('apache.aurora.client.cli.update.Update.create_context', return_value=mock_context),
       patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
     cmd = AuroraCommandLine()
     result = cmd.execute(["beta-update", "list", "west", "--user=me"])
     assert result == EXIT_OK
     assert mock_context.get_out_str() == textwrap.dedent("""\
         Job: west/mcc/test/hello, Id: hello, User: me, Status: ROLLING_FORWARD
           Created: 1411404927, Last Modified 14114056030
         Job: west/mch/prod/goodbye, Id: goodbye, User: me, Status: ROLLING_BACK
           Created: 1411300632, Last Modified 14114092632
         Job: west/mcq/devel/gasp, Id: gasp, User: me, Status: ROLL_FORWARD_PAUSED
           Created: 1411600891, Last Modified 1411800891""")
# --- Example 23 ---
 def test_get_task_up_count_with_durations(self):
     """`sla get-task-up-count` reports uptime for each requested duration."""
     fake_context = FakeAuroraCommandContext()
     self.setup_mock_sla_uptime_vector(fake_context, 95.3577434734)
     with contextlib.nested(
             patch('apache.aurora.client.cli.sla.Sla.create_context',
                   return_value=fake_context),
             patch('apache.aurora.client.factory.CLUSTERS',
                   new=self.TEST_CLUSTERS)):
         AuroraCommandLine().execute([
             'sla', 'get-task-up-count', 'west/role/env/test',
             '--durations=3m,2d6h,3h'
         ])
         report = '\n'.join(fake_context.get_out())
         for fragment in ('3 mins\t- 95.36 %', '54 hrs\t- 95.36 %',
                          '3 hrs\t- 95.36 %'):
             assert fragment in report
# --- Example 24 ---
  def test_resume_update_command_line_succeeds(self):
    """`beta-update resume` exits OK and reports the update as resumed."""
    mock_context = FakeAuroraCommandContext()
    with contextlib.nested(
        patch('apache.aurora.client.cli.update.Update.create_context', return_value=mock_context),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
      mock_api = mock_context.get_api('west')
      mock_api.resume_job_update.return_value = self.create_simple_success_response()
      # `update resume` takes only a job key; the config file the original test
      # wrote was never passed to the command, so it is omitted here.
      cmd = AuroraCommandLine()
      result = cmd.execute(['beta-update', 'resume', self.TEST_JOBSPEC])
      assert result == EXIT_OK

      mock_api.resume_job_update.assert_called_with(self.TEST_JOBKEY)
      assert mock_context.get_out() == [
          "Scheduler-driven update of job west/bozo/test/hello has been resumed."]
# --- Example 25 ---
  def test_pause_update_command_line_error(self):
    """A failed pause surfaces an error message and exits with EXIT_API_ERROR."""
    mock_context = FakeAuroraCommandContext()
    with contextlib.nested(
        patch('apache.aurora.client.cli.update.Update.create_context', return_value=mock_context),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
      mock_api = mock_context.get_api('west')
      mock_api.pause_job_update.return_value = self.create_error_response()
      # `update pause` takes only a job key; the config file the original test
      # wrote was never passed to the command, so it is omitted here.
      cmd = AuroraCommandLine()
      result = cmd.execute(['beta-update', 'pause', self.TEST_JOBSPEC])
      assert result == EXIT_API_ERROR

      mock_api.pause_job_update.assert_called_with(self.TEST_JOBKEY)
      assert mock_context.get_out() == []
      assert mock_context.get_err() == [
          "Error: Failed to pause scheduler-driven update; see log for details"]
# --- Example 26 ---
  def test_start_update_command_line_succeeds(self):
    """`beta-update start` with a valid config exits OK and reports success."""
    fake_context = FakeAuroraCommandContext()
    with contextlib.nested(
        patch('apache.aurora.client.cli.update.Update.create_context', return_value=fake_context),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
      update_api = fake_context.get_api('west')
      update_api.start_job_update.return_value = self.create_simple_success_response()
      with temporary_file() as config_file:
        config_file.write(self.get_valid_config())
        config_file.flush()
        exit_code = AuroraCommandLine().execute(
            ['beta-update', 'start', self.TEST_JOBSPEC, config_file.name])
        assert exit_code == EXIT_OK

      # Exactly one update started, from a parsed config, with no instance spec.
      assert update_api.start_job_update.call_count == 1
      positional, _ = update_api.start_job_update.call_args
      assert isinstance(positional[0], AuroraConfig)
      assert positional[1] is None
      assert fake_context.get_out() == [
          "Scheduler-driven update of job west/bozo/test/hello has started."]
      assert fake_context.get_err() == []
# --- Example 27 ---
    def test_resume_update_command_line_succeeds(self):
        """`update resume` exits OK and reports the update as resumed."""
        mock_context = FakeAuroraCommandContext()
        with contextlib.nested(
                patch('apache.aurora.client.cli.update.Update.create_context',
                      return_value=mock_context),
                patch('apache.aurora.client.factory.CLUSTERS',
                      new=self.TEST_CLUSTERS)):
            mock_api = mock_context.get_api('west')
            mock_api.resume_job_update.return_value = (
                self.create_simple_success_response())
            # `update resume` takes only a job key; the config file the original
            # test wrote was never passed to the command, so it is omitted here.
            cmd = AuroraCommandLine()
            result = cmd.execute(['update', 'resume', self.TEST_JOBSPEC])
            assert result == EXIT_OK

            mock_api.resume_job_update.assert_called_with(self.TEST_JOBKEY)
            assert mock_context.get_out() == [
                "Scheduler-driven update of job west/bozo/test/hello has been resumed."
            ]
# --- Example 28 ---
    def test_pause_update_command_line_error(self):
        """A failed pause surfaces an error message and exits with EXIT_API_ERROR."""
        mock_context = FakeAuroraCommandContext()
        with contextlib.nested(
                patch('apache.aurora.client.cli.update.Update.create_context',
                      return_value=mock_context),
                patch('apache.aurora.client.factory.CLUSTERS',
                      new=self.TEST_CLUSTERS)):
            mock_api = mock_context.get_api('west')
            mock_api.pause_job_update.return_value = self.create_error_response()
            # `update pause` takes only a job key; the config file the original
            # test wrote was never passed to the command, so it is omitted here.
            cmd = AuroraCommandLine()
            result = cmd.execute(['update', 'pause', self.TEST_JOBSPEC])
            assert result == EXIT_API_ERROR

            mock_api.pause_job_update.assert_called_with(self.TEST_JOBKEY)
            assert mock_context.get_out() == []
            assert mock_context.get_err() == [
                "Error: Failed to pause scheduler-driven update; see log for details"
            ]
# --- Example 29 ---
  def test_kill_job_with_instances_batched_output(self):
    """Batched kill of an explicit instance range reports each killed shard."""
    fake_context = FakeAuroraCommandContext()
    with contextlib.nested(
        patch('threading._Event.wait'),
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=fake_context),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
      api = fake_context.get_api('west')
      fake_context.add_expected_status_query_result(self.create_status_call_result())
      api.kill_job.return_value = self.get_kill_job_response()
      # The follow-up status query reflects the tasks having been killed.
      fake_context.add_expected_status_query_result(self.create_status_call_result(
          self.create_mock_task(ScheduleStatus.KILLED)))

      with temporary_file() as config_file:
        config_file.write(self.get_valid_config())
        config_file.flush()
        AuroraCommandLine().execute(
            ['job', 'kill', '--config=%s' % config_file.name, 'west/bozo/test/hello/0,2,4-6'])

    assert fake_context.get_out() == ['Successfully killed shards [0, 2, 4, 5, 6]',
        'job kill succeeded']
    assert fake_context.get_err() == []
# --- Example 30 ---
  def test_kill_job_with_instances_batched_maxerrors_output(self):
    """Batched kill that keeps failing aborts once --max-total-failures is hit."""
    fake_context = FakeAuroraCommandContext()
    with contextlib.nested(
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=fake_context),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
      api = fake_context.get_api('west')
      fake_context.add_expected_status_query_result(self.create_status_call_result())
      # Every kill attempt fails, so batching should stop after the error cap.
      api.kill_job.return_value = self.create_error_response()

      with temporary_file() as config_file:
        config_file.write(self.get_valid_config())
        config_file.flush()
        AuroraCommandLine().execute(
            ['job', 'kill', '--max-total-failures=1', '--config=%s' % config_file.name,
            'west/bozo/test/hello/0,2,4-13'])

      assert fake_context.get_out() == []
      assert fake_context.get_err() == [
         'Kill of shards [0, 2, 4, 5, 6] failed with error:', '\tDamn',
         'Kill of shards [7, 8, 9, 10, 11] failed with error:', '\tDamn',
         'Exceeded maximum number of errors while killing instances']
# --- Example 31 ---
  def test_killall_job_output(self):
    """`job killall --no-batching` reports success with no errors.

    The original test built a local Mock() scheduler proxy and configured
    return values on it, but never injected it anywhere — that dead setup is
    removed here.
    """
    mock_context = FakeAuroraCommandContext()
    with contextlib.nested(
        patch('threading._Event.wait'),
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):

      api = mock_context.get_api('west')
      api.kill_job.return_value = self.get_kill_job_response()
      # After the kill, a status query shows the tasks as KILLED.
      mock_context.add_expected_status_query_result(self.create_status_call_result(
          self.create_mock_task(ScheduleStatus.KILLED)))
      with temporary_file() as fp:
        fp.write(self.get_valid_config())
        fp.flush()
        cmd = AuroraCommandLine()
        cmd.execute(['job', 'killall', '--no-batching', '--config=%s' % fp.name,
            'west/bozo/test/hello'])
      assert mock_context.get_out() == ['job killall succeeded']
      assert mock_context.get_err() == []
# --- Example 32 (scrape-artifact separator; original: "Esempio n. 32") ---
  def test_simple_successful_create_job_output(self):
    """Run the "create" command against a mocked-out API and check that it
    emits the expected success message.
    """
    fake_context = FakeAuroraCommandContext()
    with contextlib.nested(
        patch('threading._Event.wait'),
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=fake_context)):
      # The job is first observed PENDING and then RUNNING, which satisfies
      # the --wait-until=RUNNING condition below.
      for status in (ScheduleStatus.PENDING, ScheduleStatus.RUNNING):
        fake_context.add_expected_status_query_result(
            self.create_mock_status_query_result(status))
      west_api = fake_context.get_api('west')
      west_api.create_job.return_value = self.get_createjob_response()

      with temporary_file() as config_file:
        config_file.write(self.get_valid_config())
        config_file.flush()
        command_line = AuroraCommandLine()
        command_line.execute(['job', 'create', '--wait-until=RUNNING',
            'west/bozo/test/hello', config_file.name])

      # A single success line with the scheduler URL, and an empty stderr.
      assert fake_context.get_out() == [
          "job create succeeded: job url=http://something_or_other/scheduler/bozo/test/hello"]
      assert fake_context.get_err() == []
# --- Example 33 (scrape-artifact separator; original: "Esempio n. 33") ---
 def test_successful_status_json_output_no_metadata(self):
     """Test the status command more deeply: in a request with a fully specified
 job, it should end up doing a query using getTasksWithoutConfigs."""
     mock_context = FakeAuroraCommandContext()
     # Queue the canned task-status fixture that the command will render.
     mock_context.add_expected_status_query_result(
         self.get_task_status_json())
     with contextlib.nested(
             patch('apache.aurora.client.cli.jobs.Job.create_context',
                   return_value=mock_context),
             patch('apache.aurora.client.factory.CLUSTERS',
                   new=self.TEST_CLUSTERS)):
         cmd = AuroraCommandLine()
         cmd.execute(
             ['job', 'status', '--write-json', 'west/bozo/test/hello'])
         # Mask wall-clock timestamps (HH:MM:SS) so the comparison against
         # the literal below is deterministic across runs.
         actual = re.sub("\\d\\d:\\d\\d:\\d\\d", "##:##:##",
                         '\n'.join(mock_context.get_out()))
         # textwrap.dedent strips the common leading whitespace, leaving the
         # exact JSON document the command is expected to print.
         expected = textwrap.dedent("""\
     [
       {
         "active": [
           {
             "status": "RUNNING",
             "assignedTask": {
               "task": {
                 "isService": false,
                 "environment": "prod",
                 "requestedPorts": [
                   "http"
                 ],
                 "jobName": "flibber",
                 "priority": 7,
                 "owner": {
                   "role": "nobody"
                 },
                 "production": false,
                 "diskMb": 4096,
                 "ramMb": 2048,
                 "maxTaskFailures": 3,
                 "numCpus": 2
               },
               "taskId": "task_0",
               "instanceId": 0,
               "assignedPorts": {
                 "http": 1001
               },
               "slaveHost": "junk.nothing",
               "slaveId": "random_machine_id"
             },
             "ancestorId": "random_task_ancestor0",
             "taskEvents": [
               {
                 "status": "PENDING",
                 "timestamp": 123456,
                 "message": "looking for a host"
               },
               {
                 "status": "ASSIGNED",
                 "timestamp": 123466,
                 "message": "found a host"
               },
               {
                 "status": "RUNNING",
                 "timestamp": 123476,
                 "message": "running"
               }
             ],
             "failureCount": 4
           },
           {
             "status": "RUNNING",
             "assignedTask": {
               "task": {
                 "isService": false,
                 "environment": "prod",
                 "requestedPorts": [
                   "http"
                 ],
                 "jobName": "flibber",
                 "priority": 7,
                 "owner": {
                   "role": "nobody"
                 },
                 "production": false,
                 "diskMb": 4096,
                 "ramMb": 2048,
                 "maxTaskFailures": 3,
                 "numCpus": 2
               },
               "taskId": "task_1",
               "instanceId": 0,
               "assignedPorts": {
                 "http": 1001
               },
               "slaveHost": "junk.nothing",
               "slaveId": "random_machine_id"
             },
             "ancestorId": "random_task_ancestor1",
             "taskEvents": [
               {
                 "status": "PENDING",
                 "timestamp": 234567,
                 "message": "looking for a host"
               },
               {
                 "status": "ASSIGNED",
                 "timestamp": 234577,
                 "message": "found a host"
               },
               {
                 "status": "RUNNING",
                 "timestamp": 234587,
                 "message": "running"
               }
             ],
             "failureCount": 5
           }
         ],
         "job": "west/bozo/test/hello",
         "inactive": []
       }
     ]""")
         assert actual == expected
# --- Example 34 (scrape-artifact separator; original: "Esempio n. 34") ---
 def test_successful_status_json_output_no_metadata(self):
    """Test the status command more deeply: in a request with a fully specified
    job, it should end up doing a query using getTasksWithoutConfigs."""
    mock_context = FakeAuroraCommandContext()
    # Queue the canned task-status fixture that the command will render.
    mock_context.add_expected_status_query_result(self.get_task_status_json())
    with contextlib.nested(
        patch('apache.aurora.client.cli.jobs.Job.create_context', return_value=mock_context),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS)):
      cmd = AuroraCommandLine()
      cmd.execute(['job', 'status', '--write-json', 'west/bozo/test/hello'])
      # Mask wall-clock timestamps (HH:MM:SS) so the comparison against the
      # literal below is deterministic across runs.
      actual = re.sub("\\d\\d:\\d\\d:\\d\\d", "##:##:##", '\n'.join(mock_context.get_out()))
      # textwrap.dedent strips the common leading whitespace, leaving the
      # exact JSON document the command is expected to print.  NOTE(review):
      # unlike the earlier variant of this test, the fixture here includes a
      # nested "job" object per task and instanceId 1 for the second task.
      expected = textwrap.dedent("""\
        [
          {
            "active": [
              {
                "status": "RUNNING",
                "assignedTask": {
                  "task": {
                    "isService": false,
                    "environment": "prod",
                    "requestedPorts": [
                      "http"
                    ],
                    "jobName": "flibber",
                    "priority": 7,
                    "owner": {
                      "role": "nobody"
                    },
                    "job": {
                      "environment": "prod",
                      "role": "nobody",
                      "name": "flibber"
                    },
                    "production": false,
                    "diskMb": 4096,
                    "ramMb": 2048,
                    "maxTaskFailures": 3,
                    "numCpus": 2
                  },
                  "taskId": "task_0",
                  "instanceId": 0,
                  "assignedPorts": {
                    "http": 1001
                  },
                  "slaveHost": "junk.nothing",
                  "slaveId": "random_machine_id"
                },
                "ancestorId": "random_task_ancestor0",
                "taskEvents": [
                  {
                    "status": "PENDING",
                    "timestamp": 123456,
                    "message": "looking for a host"
                  },
                  {
                    "status": "ASSIGNED",
                    "timestamp": 123466,
                    "message": "found a host"
                  },
                  {
                    "status": "RUNNING",
                    "timestamp": 123476,
                    "message": "running"
                  }
                ],
                "failureCount": 4
              },
              {
                "status": "RUNNING",
                "assignedTask": {
                  "task": {
                    "isService": false,
                    "environment": "prod",
                    "requestedPorts": [
                      "http"
                    ],
                    "jobName": "flibber",
                    "priority": 7,
                    "owner": {
                      "role": "nobody"
                    },
                    "job": {
                      "environment": "prod",
                      "role": "nobody",
                      "name": "flibber"
                    },
                    "production": false,
                    "diskMb": 4096,
                    "ramMb": 2048,
                    "maxTaskFailures": 3,
                    "numCpus": 2
                  },
                  "taskId": "task_1",
                  "instanceId": 1,
                  "assignedPorts": {
                    "http": 1001
                  },
                  "slaveHost": "junk.nothing",
                  "slaveId": "random_machine_id"
                },
                "ancestorId": "random_task_ancestor1",
                "taskEvents": [
                  {
                    "status": "PENDING",
                    "timestamp": 234567,
                    "message": "looking for a host"
                  },
                  {
                    "status": "ASSIGNED",
                    "timestamp": 234577,
                    "message": "found a host"
                  },
                  {
                    "status": "RUNNING",
                    "timestamp": 234587,
                    "message": "running"
                  }
                ],
                "failureCount": 5
              }
            ],
            "job": "west/bozo/test/hello",
            "inactive": []
          }
        ]""")
      assert actual == expected