Example #1
def test_default_compression_algorithm(request):
    runner = AgentRunner(PACKAGE_INSTALL, send_to_server=False)

    # deflate is the default
    install_rpm()

    runner.start()
    time.sleep(1)

    # Read the compression settings from the parsed agent status.
    agent_status = runner.status_json(True)
    compression_algorithm = agent_status["compression_type"]
    compression_level = agent_status["compression_level"]

    assert compression_algorithm == "deflate", "expected deflate, got %s" % (
        compression_algorithm)
    assert compression_level == 6

    runner.stop()
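The same pattern could be reused to pin down a non-default algorithm. A minimal sketch, assuming the agent config accepts the same "compression_type" / "compression_level" keys that the status output reports, and reusing the runner.config / runner.write_config helpers shown in Example #2; the test name and the "bz2" values are illustrative only.

def test_explicit_compression_algorithm(request):
    runner = AgentRunner(PACKAGE_INSTALL, send_to_server=False)
    install_rpm()

    # Assumption: the config keys mirror the status fields checked above.
    config = runner.config
    config["compression_type"] = "bz2"
    config["compression_level"] = 9
    runner.write_config(config)

    runner.start()
    time.sleep(1)

    agent_status = runner.status_json(True)
    assert agent_status["compression_type"] == "bz2"
    assert agent_status["compression_level"] == 9

    runner.stop()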
Example #2
def _test_standalone_smoke(
    agent_installation_type,
    python_version=None,
    rate_limited=False,
    workers_type="thread",
    workers_sessions_count=1,
):
    """
    Agent standalone test to run within the same machine.
    """
    if agent_installation_type == DEV_INSTALL:
        os.chdir(get_package_root())

    print("Agent host name: {0}".format(compat.os_environ_unicode["AGENT_HOST_NAME"]))

    runner = AgentRunner(
        agent_installation_type,
        enable_debug_log=True,
        workers_type=workers_type,
        workers_session_count=workers_sessions_count,
    )

    if python_version:
        runner.switch_version(python_version)

    agent_log_verifier = AgentLogVerifier(
        runner, compat.os_environ_unicode["SCALYR_SERVER"]
    )
    worker_sessions_verifier = None

    if workers_type == "process":
        worker_sessions_verifier = AgentWorkerSessionLogVerifier(
            runner, compat.os_environ_unicode["SCALYR_SERVER"]
        )

    if rate_limited:
        data_json_verifier = DataJsonVerifierRateLimited(
            runner, compat.os_environ_unicode["SCALYR_SERVER"]
        )
    else:
        data_json_verifier = DataJsonVerifier(
            runner, compat.os_environ_unicode["SCALYR_SERVER"]
        )
    system_metrics_verifier = SystemMetricsVerifier(
        runner, compat.os_environ_unicode["SCALYR_SERVER"]
    )
    process_metrics_verifier = ProcessMetricsVerifier(
        runner, compat.os_environ_unicode["SCALYR_SERVER"]
    )

    _create_data_json_file(runner=runner, data_json_verifier=data_json_verifier)

    runner.start()

    time.sleep(1)

    print("Verify 'agent.log'")
    assert agent_log_verifier.verify(), "Verification of the file: 'agent.log' failed"

    if workers_type == "process":
        # If workers run as separate processes, verify that the worker session logs are ingested.
        print("Verify worker session log files.")
        assert (
            worker_sessions_verifier.verify()
        ), "Verification of the worker session log files failed"

    print("Verify 'linux_system_metrics.log'")
    assert (
        system_metrics_verifier.verify()
    ), "Verification of the file: 'linux_system_metrics.log' failed"
    print("Verify 'linux_process_metrics.log'")
    assert (
        process_metrics_verifier.verify()
    ), "Verification of the file: 'linux_process_metrics.log' failed"

    assert data_json_verifier.verify(), "Verification of the file: 'data.json' failed"

    if workers_type == "process":
        # Increase the worker session count, restart the agent, and verify that all worker session logs are ingested.
        config = runner.config
        sessions_count = config["default_sessions_per_worker"]
        sessions_count += 1

        worker_sessions_verifier = AgentWorkerSessionLogVerifier(
            runner, compat.os_environ_unicode["SCALYR_SERVER"]
        )

        config["default_sessions_per_worker"] = sessions_count
        runner.write_config(config)
        runner.restart()

        # The last worker should now report the increased number of sessions.
        agent_status = json.loads(runner.status_json())
        assert (
            len(agent_status["copying_manager_status"]["workers"][-1]["sessions"])
            == sessions_count
        )

        print("Verify worker sessions log files.")

        assert (
            worker_sessions_verifier.verify()
        ), "Verification of the worker session log files is failed"

    runner.stop()
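The worker-scaling step at the end of this test (bump "default_sessions_per_worker", restart, re-check the status) is self-contained enough to factor into a helper. A minimal sketch using only the runner calls already shown above; the helper name is hypothetical.

def _scale_worker_sessions(runner, delta=1):
    """Increase the per-worker session count, restart the agent, and
    return the new expected session count."""
    config = runner.config
    expected = config["default_sessions_per_worker"] + delta

    config["default_sessions_per_worker"] = expected
    runner.write_config(config)
    runner.restart()

    # The last worker reported in the status should now carry the new session count.
    agent_status = json.loads(runner.status_json())
    sessions = agent_status["copying_manager_status"]["workers"][-1]["sessions"]
    assert len(sessions) == expected, "expected %d sessions, got %d" % (
        expected,
        len(sessions),
    )
    return expected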