Example #1
def test_compose_job_id_with_git_hash():
    fake_service = "my_cool_service"
    fake_instance = "main"
    fake_git_hash = "git123abc"
    with raises(utils.InvalidJobNameError):
        utils.compose_job_id(fake_service,
                             fake_instance,
                             git_hash=fake_git_hash)
Example #2
def test_compose_job_id_with_config_hash():
    fake_service = "my_cool_service"
    fake_instance = "main"
    fake_config_hash = "config456def"
    with raises(utils.InvalidJobNameError):
        utils.compose_job_id(fake_service,
                             fake_instance,
                             config_hash=fake_config_hash)
Example #3
def test_compose_job_id_with_hashes():
    fake_service = "my_cool_service"
    fake_instance = "main"
    fake_git_hash = "git123abc"
    fake_config_hash = "config456def"
    expected = "my_cool_service.main.git123abc.config456def"
    actual = utils.compose_job_id(fake_service, fake_instance, fake_git_hash, fake_config_hash)
    assert actual == expected
Example #4
def test_compose_job_id_with_hashes():
    fake_service = "my_cool_service"
    fake_instance = "main"
    fake_git_hash = "git123abc"
    fake_config_hash = "config456def"
    expected = "my_cool_service.main.git123abc.config456def"
    actual = utils.compose_job_id(fake_service, fake_instance, fake_git_hash,
                                  fake_config_hash)
    assert actual == expected
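Taken together, Examples #1-#4 pin down the contract of utils.compose_job_id: the job id is the service and instance joined by dots, optionally followed by both a git hash and a config hash, and supplying exactly one of the two hashes raises InvalidJobNameError. A minimal sketch consistent with these tests (the real paasta_tools implementation may differ in its details):

SPACER = "."

class InvalidJobNameError(Exception):
    pass

def compose_job_id(service, instance, git_hash=None, config_hash=None):
    """Join the pieces with SPACER; the hashes must be given both-or-neither."""
    composed = service + SPACER + instance
    if git_hash and config_hash:
        composed += SPACER + git_hash + SPACER + config_hash
    elif git_hash or config_hash:
        # Exactly one hash would make the id ambiguous to take apart later.
        raise InvalidJobNameError(
            "git_hash and config_hash must either both be given or neither")
    return composed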
Example #5
def get_happy_tasks(app, service, nerve_ns, min_task_uptime=None, check_haproxy=False):
    """Given a MarathonApp object, return the subset of tasks which are considered healthy.
    With the default options, this returns tasks where at least one of the defined Marathon healthchecks passes.
    For it to do anything interesting, set min_task_uptime or check_haproxy.

    :param app: A MarathonApp object.
    :param service: The name of the service.
    :param nerve_ns: The nerve namespace.
    :param min_task_uptime: Minimum number of seconds that a task must be running before we consider it healthy. Useful
                            if tasks take a while to start up.
    :param check_haproxy: Whether to check the local haproxy to make sure this task has been registered and discovered.
    """
    tasks = app.tasks
    happy = []
    now = datetime.datetime.utcnow()

    if check_haproxy:
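        # Query one synapse host per discovery location and keep only the
        # tasks that haproxy has actually registered and discovered.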
        tasks_in_smartstack = []
        service_namespace = compose_job_id(service, nerve_ns)

        service_namespace_config = marathon_tools.load_service_namespace_config(service, nerve_ns)
        discover_location_type = service_namespace_config.get_discover()
        unique_values = mesos_tools.get_mesos_slaves_grouped_by_attribute(discover_location_type)

        for value, hosts in unique_values.iteritems():
            synapse_host = hosts[0]
            tasks_in_smartstack.extend(get_registered_marathon_tasks(
                synapse_host,
                DEFAULT_SYNAPSE_PORT,
                service_namespace,
                tasks,
            ))
        tasks = tasks_in_smartstack

    for task in tasks:
        if min_task_uptime is not None:
            if (now - task.started_at).total_seconds() < min_task_uptime:
                continue

        # if there are healthchecks defined for the app but none have executed yet, then the task is unhappy
        if len(app.health_checks) > 0 and len(task.health_check_results) == 0:
            continue

        # if there are health check results, check if at least one healthcheck is passing
        if len(task.health_check_results) > 0:
            task_up = any([hc_result.alive is True for hc_result in task.health_check_results])
            if not task_up:
                continue
        happy.append(task)

    return happy
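A hypothetical usage sketch (marathon_client and the concrete values are made up for illustration): keep only the tasks that have been running for at least two minutes and that the local haproxy has discovered.

app = marathon_client.get_app("/my_cool_service.main")  # assumed Marathon client
happy_tasks = get_happy_tasks(
    app,
    service="my_cool_service",
    nerve_ns="main",
    min_task_uptime=120,
    check_haproxy=True,
)
print("%d of %d tasks are happy" % (len(happy_tasks), len(app.tasks)))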
Example #6
def test_send_event_users_monitoring_tools_send_event_respects_alert_after():
    fake_service_name = 'superfast'
    fake_namespace = 'jellyfish'
    fake_status = '999999'
    fake_output = 'YOU DID IT'
    fake_cluster = 'fake_cluster'
    fake_monitoring_overrides = {'alert_after': '666m'}
    fake_soa_dir = '/hi/hello/hey'
    expected_check_name = 'check_marathon_services_replication.%s' % compose_job_id(
        fake_service_name, fake_namespace)
    with contextlib.nested(
            mock.patch("paasta_tools.monitoring_tools.send_event",
                       autospec=True),
            mock.patch(
                'check_marathon_services_replication.load_system_paasta_config',
                autospec=True),
            mock.patch("paasta_tools.check_marathon_services_replication._log",
                       autospec=True),
            mock.patch(
                "paasta_tools.marathon_tools.load_marathon_service_config",
                autospec=True),
    ) as (
            send_event_patch,
            load_system_paasta_config_patch,
            log_patch,
            load_marathon_service_config_patch,
    ):
        load_marathon_service_config_patch.return_value.get_monitoring.return_value = fake_monitoring_overrides
        check_marathon_services_replication.send_event(
            fake_service_name, fake_namespace, fake_cluster, fake_soa_dir,
            fake_status, fake_output)
        assert send_event_patch.call_count == 1
        send_event_patch.assert_called_once_with(fake_service_name,
                                                 expected_check_name, mock.ANY,
                                                 fake_status, fake_output,
                                                 fake_soa_dir)
        # The overrides dictionary is mutated in the function under test, so
        # we expect the send_event_patch to be called with something that is a
        # superset of what we originally put in (fake_monitoring_overrides)
        actual_overrides_used = send_event_patch.call_args[0][2]
        assert set({
            'alert_after': '666m'
        }.items()).issubset(set(actual_overrides_used.items()))
        assert not set({
            'alert_after': '2m'
        }.items()).issubset(set(actual_overrides_used.items()))
Example #7
def test_send_event_users_monitoring_tools_send_event_respects_alert_after():
    fake_service_name = 'superfast'
    fake_namespace = 'jellyfish'
    fake_status = '999999'
    fake_output = 'YOU DID IT'
    fake_cluster = 'fake_cluster'
    fake_monitoring_overrides = {'alert_after': '666m'}
    fake_soa_dir = '/hi/hello/hey'
    expected_check_name = 'check_marathon_services_replication.%s' % compose_job_id(fake_service_name, fake_namespace)
    with contextlib.nested(
        mock.patch("paasta_tools.monitoring_tools.send_event", autospec=True),
        mock.patch('check_marathon_services_replication.load_system_paasta_config', autospec=True),
        mock.patch("paasta_tools.check_marathon_services_replication._log", autospec=True),
        mock.patch("paasta_tools.marathon_tools.load_marathon_service_config", autospec=True),
    ) as (
        send_event_patch,
        load_system_paasta_config_patch,
        log_patch,
        load_marathon_service_config_patch,
    ):
        load_marathon_service_config_patch.return_value.get_monitoring.return_value = fake_monitoring_overrides
        check_marathon_services_replication.send_event(fake_service_name,
                                                       fake_namespace,
                                                       fake_cluster,
                                                       fake_soa_dir,
                                                       fake_status,
                                                       fake_output)
        assert send_event_patch.call_count == 1
        send_event_patch.assert_called_once_with(
            fake_service_name,
            expected_check_name,
            mock.ANY,
            fake_status,
            fake_output,
            fake_soa_dir
        )
        # The overrides dictionary is mutated in the function under test, so
        # we expect the send_event_patch to be called with something that is a
        # superset of what we originally put in (fake_monitoring_overrides)
        actual_overrides_used = send_event_patch.call_args[0][2]
        assert set({'alert_after': '666m'}.items()).issubset(set(actual_overrides_used.items()))
        assert not set({'alert_after': '2m'}.items()).issubset(set(actual_overrides_used.items()))
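The subset assertion above is a general idiom: turning dict items into sets lets a test check that one dict is contained in another, which is useful when the code under test adds keys to a dict you passed in. A standalone sketch (the added key is hypothetical):

passed_in = {'alert_after': '666m'}
mutated = {'alert_after': '666m', 'team': 'fake_team'}  # hypothetical added key
assert set(passed_in.items()).issubset(set(mutated.items()))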
Example #8
def test_compose_job_id_without_hashes():
    fake_service = "my_cool_service"
    fake_instance = "main"
    expected = "my_cool_service.main"
    actual = utils.compose_job_id(fake_service, fake_instance)
    assert actual == expected
Example #9
def test_compose_job_id_with_config_hash():
    fake_service = "my_cool_service"
    fake_instance = "main"
    fake_config_hash = "config456def"
    with raises(utils.InvalidJobNameError):
        utils.compose_job_id(fake_service, fake_instance, config_hash=fake_config_hash)
Example #10
def test_compose_job_id_with_git_hash():
    fake_service = "my_cool_service"
    fake_instance = "main"
    fake_git_hash = "git123abc"
    with raises(utils.InvalidJobNameError):
        utils.compose_job_id(fake_service, fake_instance, git_hash=fake_git_hash)