def test_smdataparallel_profiling_ranges(profiler_config_path, test_case):
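    """Verify that an SMDataparallelProfilingConfig with StartStep/NumSteps is parsed
    into the expected start/end steps, and that should_save_metrics reports the
    expected result for the SMDATAPARALLEL_PROFILING category.

    `test_case` is assumed to be supplied via pytest parametrization, and
    `current_step`/`current_time` (along with `json`, `build_metrics_config`,
    `ProfilerConfigParser`, and `MetricsCategory`) are assumed to be defined or
    imported at module level elsewhere in this file.
    """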
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, num_steps = profiling_parameters

    smdataparallel_profiling_config = build_metrics_config(
        StartStep=start_step, NumSteps=num_steps)

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "SMDataparallelProfilingConfig": smdataparallel_profiling_config,
        }
    }

    with open(profiler_config_path, "w") as f:
        json.dump(full_config, f)

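    # Parse the profiler config that was just written; the profiler_config_path
    # fixture is assumed to point the parser at that file.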
    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    smdataparallel_profiling_config = profiler_config_parser.config.smdataparallel_profiling_config
    assert smdataparallel_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.SMDATAPARALLEL_PROFILING,
        current_step,
        current_time=current_time) == expected_can_save)

    expected_start_step, expected_end_step = expected_values
    assert smdataparallel_profiling_config.start_step == expected_start_step
    assert smdataparallel_profiling_config.end_step == expected_end_step


def test_dataloader_profiling_ranges(profiler_config_path, test_case):
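    """Verify that a DataloaderProfilingConfig with StartStep/MetricsRegex is parsed
    into the expected start/end steps and regex, and that should_save_metrics
    reports the expected result for the given metrics_name.
    """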
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, metrics_regex, metrics_name = profiling_parameters
    dataloader_profiling_config = build_metrics_config(
        StartStep=start_step, MetricsRegex=metrics_regex)

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "DataloaderProfilingConfig": dataloader_profiling_config,
        }
    }

    with open(profiler_config_path, "w") as f:
        json.dump(full_config, f)

    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    dataloader_profiling_config = profiler_config_parser.config.dataloader_profiling_config
    assert dataloader_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.DATALOADER_PROFILING,
        current_step,
        metrics_name=metrics_name) == expected_can_save)

    expected_start_step, expected_end_step, expected_metrics_regex = expected_values
    assert dataloader_profiling_config.start_step == expected_start_step
    assert dataloader_profiling_config.end_step == expected_end_step
    assert dataloader_profiling_config.metrics_regex == expected_metrics_regex


def test_python_profiling_ranges(profiler_config_path, test_case):
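    """Verify that a PythonProfilingConfig with StartStep/NumSteps/ProfilerName/
    cProfileTimer is parsed into the expected step range, profiler name, and
    cProfile timer, and that should_save_metrics reports the expected result for
    the PYTHON_PROFILING category.
    """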
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, num_steps, profiler_name, cprofile_timer = profiling_parameters
    python_profiling_config = build_metrics_config(
        StartStep=start_step,
        NumSteps=num_steps,
        ProfilerName=profiler_name,
        cProfileTimer=cprofile_timer,
    )

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "PythonProfilingConfig": python_profiling_config,
        }
    }

    with open(profiler_config_path, "w") as f:
        json.dump(full_config, f)

    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    python_profiling_config = profiler_config_parser.config.python_profiling_config
    assert python_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.PYTHON_PROFILING, current_step) == expected_can_save)

    expected_start_step, expected_end_step, expected_profiler_name, expected_cprofile_timer = (
        expected_values)
    assert python_profiling_config.start_step == expected_start_step
    assert python_profiling_config.end_step == expected_end_step
    assert python_profiling_config.profiler_name == expected_profiler_name
    assert python_profiling_config.cprofile_timer == expected_cprofile_timer


def test_detailed_profiling_ranges(profiler_config_path, test_case):
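    """Verify that a DetailedProfilingConfig combining step-based parameters
    (StartStep/NumSteps) and time-based parameters (StartTimeInSecSinceEpoch/
    DurationInSeconds) is parsed into the expected step and time ranges, and that
    should_save_metrics reports the expected result for DETAILED_PROFILING at the
    current step and time.
    """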
    profiling_parameters, expected_enabled, expected_can_save, expected_values = test_case
    start_step, num_steps, start_time, duration = profiling_parameters
    detailed_profiling_config = build_metrics_config(
        StartStep=start_step,
        NumSteps=num_steps,
        StartTimeInSecSinceEpoch=start_time,
        DurationInSeconds=duration,
    )

    full_config = {
        "ProfilingParameters": {
            "ProfilerEnabled": True,
            "DetailedProfilingConfig": detailed_profiling_config,
        }
    }

    with open(profiler_config_path, "w") as f:
        json.dump(full_config, f)

    profiler_config_parser = ProfilerConfigParser()
    assert profiler_config_parser.profiling_enabled

    detailed_profiling_config = profiler_config_parser.config.detailed_profiling_config
    assert detailed_profiling_config.is_enabled() == expected_enabled
    assert (profiler_config_parser.should_save_metrics(
        MetricsCategory.DETAILED_PROFILING,
        current_step,
        current_time=current_time) == expected_can_save)

    expected_start_step, expected_end_step, expected_start_time, expected_end_time = expected_values
    assert detailed_profiling_config.start_step == expected_start_step
    assert detailed_profiling_config.end_step == expected_end_step
    assert detailed_profiling_config.start_time_in_sec == expected_start_time
    assert detailed_profiling_config.end_time == expected_end_time