def test_update_step_profiler_config_parser(
    monkeypatch, old_step_profiler_config_parser_path, new_step_profiler_config_parser_path
):
    """
    This test verifies two behaviors when the profiler config parser dynamically reloads a config with step fields:
        - Reloading the config when the JSON hasn't changed does not reload the step fields (this matters when the
          JSON does not specify step parameters, for example).
        - Reloading the config when the JSON has changed reloads the step fields from the new JSON.
    """
    # sanity check that the parser first parses the range fields as is.
    monkeypatch.setenv("SMPROFILER_CONFIG_PATH", old_step_profiler_config_parser_path)
    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled
    assert profiler_config_parser.config.detailed_profiling_config.is_enabled()
    assert profiler_config_parser.config.detailed_profiling_config.start_step is None
    assert profiler_config_parser.config.detailed_profiling_config.num_steps == 2

    # sanity check that calling should_save_metrics fills in unspecified range fields and leaves the rest as is.
    profiler_config_parser.should_save_metrics(MetricsCategory.DETAILED_PROFILING, 5)
    assert profiler_config_parser.config.detailed_profiling_config.start_step == 5
    assert profiler_config_parser.config.detailed_profiling_config.num_steps == 2

    # check that reloading the config when it hasn't changed won't change the config fields.
    profiler_config_parser.load_config()
    assert profiler_config_parser.config.detailed_profiling_config.start_step == 5
    assert profiler_config_parser.config.detailed_profiling_config.num_steps == 2

    # check that reloading the config when it has changed will update the config fields.
    monkeypatch.setenv("SMPROFILER_CONFIG_PATH", new_step_profiler_config_parser_path)
    profiler_config_parser.load_config()
    assert profiler_config_parser.profiling_enabled
    assert profiler_config_parser.config.detailed_profiling_config.is_enabled()
    assert profiler_config_parser.config.detailed_profiling_config.start_step == 10
    assert profiler_config_parser.config.detailed_profiling_config.num_steps == 5
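    # For reference, a minimal sketch of the JSON the two fixture paths are assumed to
    # point at, derived from the assertions above (the real fixture files may contain
    # additional fields):
    #
    #   old_step_profiler_config_parser_path:
    #     {"ProfilingParameters": {"ProfilerEnabled": true,
    #                              "DetailedProfilingConfig": "{\"NumSteps\": 2}"}}
    #   new_step_profiler_config_parser_path:
    #     {"ProfilingParameters": {"ProfilerEnabled": true,
    #                              "DetailedProfilingConfig": "{\"StartStep\": 10, \"NumSteps\": 5}"}}

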
def test_dataloader_profiling_ranges(detailed_profiler_config_path, test_case):
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, metrics_regex, metrics_name = profiling_parameters
    dataloader_config = "{"
    if start_step is not None:
        dataloader_config += _convert_key_and_value("StartStep", start_step)
    if metrics_regex is not None:
        dataloader_config += _convert_key_and_value("MetricsRegex", metrics_regex)
    dataloader_config += "}"

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "DataloaderProfilingConfig": dataloader_config,
        }
    }

    with open(detailed_profiler_config_path, "w") as f:
        json.dump(full_config, f)

    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    dataloader_profiling_config = profiler_config_parser.config.dataloader_profiling_config
    assert dataloader_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.DATALOADER_PROFILING,
        current_step,
        metrics_name=metrics_name) == expected_can_save)

    expected_start_step, expected_end_step, expected_metrics_regex = expected_values
    assert dataloader_profiling_config.start_step == expected_start_step
    assert dataloader_profiling_config.end_step == expected_end_step
    assert dataloader_profiling_config.metrics_regex == expected_metrics_regex
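    # Assumptions about names defined elsewhere in this test module: current_step (and
    # current_time below) are taken to be module-level test constants, and
    # _convert_key_and_value is taken to be a small helper that renders one
    # '"Key": value, ' fragment of the stringified JSON config. A minimal sketch of
    # such a helper (the real one may format values differently, e.g. quoting strings):
    #
    #   def _convert_key_and_value(key, value):
    #       return f'"{key}": {value}, '

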
def test_smdataparallel_profiling_ranges(profiler_config_path, test_case):
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, num_steps = profiling_parameters

    smdataparallel_profiling_config = build_metrics_config(
        StartStep=start_step, NumSteps=num_steps)

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "SMDataparallelProfilingConfig": smdataparallel_profiling_config,
        }
    }

    with open(profiler_config_path, "w") as f:
        json.dump(full_config, f)

    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    smdataparallel_profiling_config = profiler_config_parser.config.smdataparallel_profiling_config
    assert smdataparallel_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.SMDATAPARALLEL_PROFILING,
        current_step,
        current_time=current_time) == expected_can_save)

    expected_start_step, expected_end_step = expected_values
    assert smdataparallel_profiling_config.start_step == expected_start_step
    assert smdataparallel_profiling_config.end_step == expected_end_step
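    # build_metrics_config is assumed to assemble the same stringified-JSON config that
    # the string-building tests in this suite build by hand. A plausible minimal sketch
    # (the real helper lives in the shared test utilities and may differ):
    #
    #   def build_metrics_config(**kwargs):
    #       return json.dumps({key: value for key, value in kwargs.items() if value is not None})

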
def test_python_profiling_ranges(profiler_config_path, test_case):
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, num_steps, profiler_name, cprofile_timer = profiling_parameters
    python_profiling_config = build_metrics_config(
        StartStep=start_step,
        NumSteps=num_steps,
        ProfilerName=profiler_name,
        cProfileTimer=cprofile_timer,
    )

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "PythonProfilingConfig": python_profiling_config,
        }
    }

    with open(profiler_config_path, "w") as f:
        json.dump(full_config, f)

    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    python_profiling_config = profiler_config_parser.config.python_profiling_config
    assert python_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.PYTHON_PROFILING, current_step) == expected_can_save)

    expected_start_step, expected_end_step, expected_profiler_name, expected_cprofile_timer = (
        expected_values)
    assert python_profiling_config.start_step == expected_start_step
    assert python_profiling_config.end_step == expected_end_step
    assert python_profiling_config.profiler_name == expected_profiler_name
    assert python_profiling_config.cprofile_timer == expected_cprofile_timer


def test_detailed_profiling_ranges(profiler_config_path, test_case):
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, num_steps, start_time, duration = profiling_parameters
    detailed_profiling_config = build_metrics_config(
        StartStep=start_step,
        NumSteps=num_steps,
        StartTimeInSecSinceEpoch=start_time,
        DurationInSeconds=duration,
    )

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "DetailedProfilingConfig": detailed_profiling_config,
        }
    }

    with open(profiler_config_path, "w") as f:
        json.dump(full_config, f)

    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    detailed_profiling_config = profiler_config_parser.config.detailed_profiling_config
    assert detailed_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.DETAILED_PROFILING,
        current_step,
        current_time=current_time) == expected_can_save)

    expected_start_step, expected_end_step, expected_start_time, expected_end_time = expected_values
    assert detailed_profiling_config.start_step == expected_start_step
    assert detailed_profiling_config.end_step == expected_end_step
    assert detailed_profiling_config.start_time_in_sec == expected_start_time
    assert detailed_profiling_config.end_time == expected_end_time
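    # Note: the expected end values are presumably derived from the inputs, i.e.
    # end_step = start_step + num_steps and end_time = start_time + duration, with the
    # parser filling in defaults for any field left unspecified.

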
def test_detailed_profiling_ranges(detailed_profiler_config_path, test_case):
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, num_steps, start_time, duration = profiling_parameters
    detailed_profiler_config = "{"
    if start_step is not None:
        detailed_profiler_config += _convert_key_and_value("StartStep", start_step)
    if num_steps is not None:
        detailed_profiler_config += _convert_key_and_value("NumSteps", num_steps)
    if start_time is not None:
        detailed_profiler_config += _convert_key_and_value("StartTimeInSecSinceEpoch", start_time)
    if duration is not None:
        detailed_profiler_config += _convert_key_and_value("DurationInSeconds", duration)
    detailed_profiler_config += "}"

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "DetailedProfilingConfig": detailed_profiler_config,
        }
    }

    with open(detailed_profiler_config_path, "w") as f:
        json.dump(full_config, f)

    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    detailed_profiling_config = profiler_config_parser.config.detailed_profiling_config
    assert detailed_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.DETAILED_PROFILING,
        current_step,
        current_time=current_time) == expected_can_save)

    expected_start_step, expected_end_step, expected_start_time, expected_end_time = expected_values
    assert detailed_profiling_config.start_step == expected_start_step
    assert detailed_profiling_config.end_step == expected_end_step
    assert detailed_profiling_config.start_time_in_sec == expected_start_time
    assert detailed_profiling_config.end_time == expected_end_time


def test_python_profiling_ranges(python_profiler_config_path, test_case):
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, num_steps, profiler_name, cprofile_timer = profiling_parameters
    python_profiler_config = "{"
    if start_step is not None:
        python_profiler_config += _convert_key_and_value(
            "StartStep", start_step)
    if num_steps is not None:
        python_profiler_config += _convert_key_and_value("NumSteps", num_steps)
    if profiler_name is not None:
        python_profiler_config += _convert_key_and_value(
            "ProfilerName", profiler_name)
    if cprofile_timer is not None:
        python_profiler_config += _convert_key_and_value(
            "cProfileTimer", cprofile_timer)
    python_profiler_config += "}"

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "PythonProfilingConfig": python_profiler_config,
        }
    }

    with open(python_profiler_config_path, "w") as f:
        json.dump(full_config, f)

    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    python_profiling_config = profiler_config_parser.config.python_profiling_config
    assert python_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.PYTHON_PROFILING, current_step) == expected_can_save)

    expected_start_step, expected_end_step, expected_profiler_name, expected_cprofile_timer = (
        expected_values)
    assert python_profiling_config.start_step == expected_start_step
    assert python_profiling_config.end_step == expected_end_step
    assert python_profiling_config.profiler_name == expected_profiler_name
    assert python_profiling_config.cprofile_timer == expected_cprofile_timer
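

# A hypothetical parametrization for the test above, illustrating the assumed shape of
# each test_case tuple (the values here are made up for illustration; the real cases are
# supplied with the test_case fixture elsewhere):
#
#   @pytest.mark.parametrize(
#       "test_case",
#       [
#           # ((start_step, num_steps, profiler_name, cprofile_timer),
#           #  expected_enabled, expected_can_save,
#           #  (expected_start_step, expected_end_step, expected_profiler_name, expected_cprofile_timer))
#           ((1, 2, "cprofile", "total_time"), True, False, (1, 3, "cprofile", "total_time")),
#       ],
#   )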