Example #1
def _read_one_composite(saliency_file_name, smoothing_radius_grid_cells):
    """Reads saliency map for one composite.

    T = number of input tensors for model

    :param saliency_file_name: Path to input file (will be read by
        `saliency_maps.read_file`).
    :param smoothing_radius_grid_cells: Radius for Gaussian smoother, used only
        for saliency map.
    :return: mean_predictor_matrices: length-T list of numpy arrays, where the
        [i]th item has dimensions of the [i]th input tensor for the model.
    :return: mean_saliency_matrices: Same as `mean_predictor_matrices` but with
        saliency values.
    :return: model_metadata_dict: Dictionary returned by
        `cnn.read_model_metadata`.
    """

    print('Reading data from: "{0:s}"...'.format(saliency_file_name))
    saliency_dict = saliency_maps.read_file(saliency_file_name)[0]

    mean_predictor_matrices = saliency_dict[MEAN_PREDICTOR_MATRICES_KEY]
    mean_saliency_matrices = saliency_dict[MEAN_SALIENCY_MATRICES_KEY]
    num_matrices = len(mean_predictor_matrices)

    for i in range(num_matrices):
        mean_predictor_matrices[i] = numpy.expand_dims(
            mean_predictor_matrices[i], axis=0)
        mean_saliency_matrices[i] = numpy.expand_dims(
            mean_saliency_matrices[i], axis=0)

    model_file_name = saliency_dict[MODEL_FILE_KEY]
    model_metafile_name = cnn.find_metafile(model_file_name)

    print('Reading CNN metadata from: "{0:s}"...'.format(model_metafile_name))
    model_metadata_dict = cnn.read_model_metadata(model_metafile_name)
    training_option_dict = model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY]
    training_option_dict[trainval_io.UPSAMPLE_REFLECTIVITY_KEY] = False

    all_refl_heights_m_agl = training_option_dict[
        trainval_io.RADAR_HEIGHTS_KEY]
    good_flags = numpy.array(
        [h in REFL_HEIGHTS_M_AGL for h in all_refl_heights_m_agl], dtype=bool)
    good_indices = numpy.where(good_flags)[0]

    mean_predictor_matrices[0] = (
        mean_predictor_matrices[0][..., good_indices, :])
    mean_saliency_matrices[0] = (
        mean_saliency_matrices[0][..., good_indices, :])

    training_option_dict[trainval_io.RADAR_HEIGHTS_KEY] = REFL_HEIGHTS_M_AGL
    model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY] = training_option_dict

    if smoothing_radius_grid_cells is not None:
        mean_saliency_matrices = _smooth_maps(
            saliency_matrices=mean_saliency_matrices,
            smoothing_radius_grid_cells=smoothing_radius_grid_cells)

    return mean_predictor_matrices, mean_saliency_matrices, model_metadata_dict
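
The helper `_smooth_maps` is called above but not shown in this example. Below is a minimal sketch of what such a helper might look like, assuming each saliency matrix has shape (num_examples, ..., num_channels) and using `scipy.ndimage.gaussian_filter` as a stand-in for GewitterGefahr's `general_utils.apply_gaussian_filter` (so the radius is treated here as a plain Gaussian standard deviation, not an e-folding radius).

import numpy
from scipy.ndimage import gaussian_filter


def _smooth_maps(saliency_matrices, smoothing_radius_grid_cells):
    """Hypothetical sketch: smooths each saliency field with a Gaussian filter.

    :param saliency_matrices: List of numpy arrays, each with shape
        (num_examples, ..., num_channels).
    :param smoothing_radius_grid_cells: Smoothing radius (used directly as the
        Gaussian standard deviation, in grid cells).
    :return: saliency_matrices: Same as input but smoothed over the spatial
        dimensions.
    """

    for j in range(len(saliency_matrices)):
        num_examples = saliency_matrices[j].shape[0]
        num_channels = saliency_matrices[j].shape[-1]

        for i in range(num_examples):
            for k in range(num_channels):
                saliency_matrices[j][i, ..., k] = gaussian_filter(
                    saliency_matrices[j][i, ..., k],
                    sigma=smoothing_radius_grid_cells)

    return saliency_matrices
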
Example #2
def _composite_saliency_maps(
        input_file_name, max_percentile_level, output_file_name):
    """Composites predictors and resulting saliency maps.

    :param input_file_name: Path to input file.  Will be read by
        `saliency_maps.read_file`.
    :param max_percentile_level: See documentation at top of file.
    :param output_file_name: Path to output file.  Will be written by
        `saliency_maps.write_pmm_file`.
    """

    print('Reading data from: "{0:s}"...'.format(input_file_name))
    saliency_dict = saliency_maps.read_file(input_file_name)[0]

    predictor_matrices = saliency_dict[saliency_maps.PREDICTOR_MATRICES_KEY]
    saliency_matrices = saliency_dict[saliency_maps.SALIENCY_MATRICES_KEY]
    sounding_pressure_matrix_pa = saliency_dict[
        saliency_maps.SOUNDING_PRESSURES_KEY]

    print('Compositing predictor matrices...')
    mean_predictor_matrices, mean_sounding_pressures_pa = _composite_predictors(
        predictor_matrices=predictor_matrices,
        max_percentile_level=max_percentile_level,
        sounding_pressure_matrix_pa=sounding_pressure_matrix_pa)

    print('Compositing saliency maps...')
    num_matrices = len(predictor_matrices)
    mean_saliency_matrices = [None] * num_matrices

    for i in range(num_matrices):
        mean_saliency_matrices[i] = pmm.run_pmm_many_variables(
            input_matrix=saliency_matrices[i],
            max_percentile_level=max_percentile_level)

    print('Writing output to: "{0:s}"...'.format(output_file_name))
    saliency_maps.write_pmm_file(
        pickle_file_name=output_file_name,
        mean_denorm_predictor_matrices=mean_predictor_matrices,
        mean_saliency_matrices=mean_saliency_matrices,
        model_file_name=saliency_dict[saliency_maps.MODEL_FILE_KEY],
        non_pmm_file_name=input_file_name,
        pmm_max_percentile_level=max_percentile_level,
        mean_sounding_pressures_pa=mean_sounding_pressures_pa)
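
The compositing itself is delegated to `pmm.run_pmm_many_variables`. As a rough, self-contained illustration of the probability-matched-mean idea for a single 2-D variable (a sketch of the concept, not the library's implementation): the composite keeps the spatial pattern of the pixelwise mean but replaces its values with values drawn from the distribution pooled over all examples.

import numpy


def toy_probability_matched_mean(field_matrix, max_percentile_level=99.):
    """Toy PMM for one variable on a 2-D grid.

    :param field_matrix: numpy array with shape (num_examples, num_rows,
        num_columns).
    :param max_percentile_level: Pooled values above this percentile are
        clipped before matching.
    :return: composite_matrix: num_rows-by-num_columns composite.
    """

    mean_field = numpy.mean(field_matrix, axis=0)

    # Pool values over all examples and clip the extreme upper tail.
    pooled_values = numpy.ravel(field_matrix)
    max_value = numpy.percentile(pooled_values, max_percentile_level)
    pooled_values = numpy.minimum(pooled_values, max_value)

    # Sort pooled values in descending order and thin them out so that there
    # is one value per grid cell in the composite.
    pooled_values = numpy.sort(pooled_values)[::-1]
    thinning_indices = numpy.linspace(
        0, len(pooled_values) - 1, num=mean_field.size).astype(int)
    matched_values = pooled_values[thinning_indices]

    # Assign the largest pooled value to the grid cell with the largest mean,
    # the second-largest to the second-largest cell, and so on.
    composite_matrix = numpy.empty(mean_field.size)
    sort_indices = numpy.argsort(-numpy.ravel(mean_field))
    composite_matrix[sort_indices] = matched_values

    return numpy.reshape(composite_matrix, mean_field.shape)
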
Example #3
def _read_one_composite(saliency_file_name, smoothing_radius_grid_cells):
    """Reads saliency map for one composite.

    E = number of examples
    M = number of rows in grid
    N = number of columns in grid
    H = number of heights in grid
    F = number of radar fields

    :param saliency_file_name: Path to input file (will be read by
        `saliency_maps.read_file`).
    :param smoothing_radius_grid_cells: Radius for Gaussian smoother, used only
        for saliency map.
    :return: mean_radar_matrix: E-by-M-by-N-by-H-by-F numpy array with mean
        radar fields.
    :return: mean_saliency_matrix: E-by-M-by-N-by-H-by-F numpy array with mean
        saliency fields.
    :return: model_metadata_dict: Dictionary returned by
        `cnn.read_model_metadata`.
    """

    print('Reading data from: "{0:s}"...'.format(saliency_file_name))
    saliency_dict = saliency_maps.read_file(saliency_file_name)[0]

    mean_radar_matrix = numpy.expand_dims(
        saliency_dict[saliency_maps.MEAN_PREDICTOR_MATRICES_KEY][0], axis=0)
    mean_saliency_matrix = numpy.expand_dims(
        saliency_dict[saliency_maps.MEAN_SALIENCY_MATRICES_KEY][0], axis=0)

    if smoothing_radius_grid_cells is not None:
        print((
            'Smoothing saliency maps with Gaussian filter (e-folding radius of '
            '{0:.1f} grid cells)...').format(smoothing_radius_grid_cells))

        num_fields = mean_radar_matrix.shape[-1]

        for k in range(num_fields):
            mean_saliency_matrix[0, ..., k] = (
                general_utils.apply_gaussian_filter(
                    input_matrix=mean_saliency_matrix[0, ..., k],
                    e_folding_radius_grid_cells=smoothing_radius_grid_cells))

    model_file_name = saliency_dict[saliency_maps.MODEL_FILE_KEY]
    model_metafile_name = cnn.find_metafile(model_file_name)

    print('Reading CNN metadata from: "{0:s}"...'.format(model_metafile_name))
    model_metadata_dict = cnn.read_model_metadata(model_metafile_name)
    training_option_dict = model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY]

    good_indices = numpy.array([
        numpy.where(
            training_option_dict[trainval_io.RADAR_HEIGHTS_KEY] == h)[0][0]
        for h in RADAR_HEIGHTS_M_AGL
    ], dtype=int)

    mean_radar_matrix = mean_radar_matrix[..., good_indices, :]
    mean_saliency_matrix = mean_saliency_matrix[..., good_indices, :]

    good_indices = numpy.array([
        training_option_dict[trainval_io.RADAR_FIELDS_KEY].index(f)
        for f in RADAR_FIELD_NAMES
    ], dtype=int)

    mean_radar_matrix = mean_radar_matrix[..., good_indices]
    mean_saliency_matrix = mean_saliency_matrix[..., good_indices]

    training_option_dict[trainval_io.RADAR_HEIGHTS_KEY] = RADAR_HEIGHTS_M_AGL
    training_option_dict[trainval_io.RADAR_FIELDS_KEY] = RADAR_FIELD_NAMES
    training_option_dict[trainval_io.SOUNDING_FIELDS_KEY] = None
    model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY] = training_option_dict

    return mean_radar_matrix, mean_saliency_matrix, model_metadata_dict
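
The height and field subsetting above is plain numpy fancy indexing along the trailing axes of the E-by-M-by-N-by-H-by-F matrix. A small self-contained demo of the `[..., good_indices, :]` pattern (all arrays below are made up for illustration):

import numpy

# Fake E-by-M-by-N-by-H-by-F radar matrix: 1 example, 4-by-4 grid, 6 heights,
# 3 fields.
radar_matrix = numpy.random.rand(1, 4, 4, 6, 3)

all_heights_m_agl = numpy.array([1000, 2000, 3000, 4000, 5000, 6000])
desired_heights_m_agl = numpy.array([2000, 4000, 6000])

height_indices = numpy.array([
    numpy.where(all_heights_m_agl == h)[0][0] for h in desired_heights_m_agl
], dtype=int)

# Subset along the height axis (second-to-last) while keeping all fields.
radar_matrix = radar_matrix[..., height_indices, :]
print(radar_matrix.shape)  # (1, 4, 4, 3, 3)

# Subset along the field axis (last).
field_indices = numpy.array([0, 2], dtype=int)
radar_matrix = radar_matrix[..., field_indices]
print(radar_matrix.shape)  # (1, 4, 4, 3, 2)
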
Example #4
def _read_one_composite(saliency_file_name, smoothing_radius_grid_cells,
                        monte_carlo_file_name):
    """Reads saliency map for one composite.

    T = number of model-input tensors with radar data

    :param saliency_file_name: Path to input file (will be read by
        `saliency_maps.read_file`).
    :param smoothing_radius_grid_cells: Radius for Gaussian smoother, used only
        for saliency map.
    :param monte_carlo_file_name: Path to Monte Carlo file (will be read by
        `_read_monte_carlo_file`).
    :return: mean_radar_matrices: length-T list of numpy arrays with mean
        predictor values.  The [i]th array has the same dimensions as the [i]th
        input tensor to the model.
    :return: mean_saliency_matrices: Same but with saliency values.
    :return: significance_matrices: Same but with Boolean significance flags.
    :return: model_metadata_dict: Dictionary returned by
        `cnn.read_model_metadata`.
    """

    print('Reading data from: "{0:s}"...'.format(saliency_file_name))
    saliency_dict = saliency_maps.read_file(saliency_file_name)[0]

    mean_radar_matrices = saliency_dict[MEAN_PREDICTOR_MATRICES_KEY]
    mean_saliency_matrices = saliency_dict[MEAN_SALIENCY_MATRICES_KEY]
    num_matrices = len(mean_radar_matrices)

    for i in range(num_matrices):
        mean_radar_matrices[i] = numpy.expand_dims(
            mean_radar_matrices[i], axis=0)
        mean_saliency_matrices[i] = numpy.expand_dims(
            mean_saliency_matrices[i], axis=0)

    model_file_name = saliency_dict[MODEL_FILE_KEY]
    model_metafile_name = cnn.find_metafile(model_file_name)

    print('Reading CNN metadata from: "{0:s}"...'.format(model_metafile_name))
    model_metadata_dict = cnn.read_model_metadata(model_metafile_name)
    training_option_dict = model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY]
    training_option_dict[trainval_io.UPSAMPLE_REFLECTIVITY_KEY] = False

    has_soundings = (
        training_option_dict[trainval_io.SOUNDING_FIELDS_KEY] is not None
    )
    if has_soundings:
        mean_radar_matrices = mean_radar_matrices[:-1]
        mean_saliency_matrices = mean_saliency_matrices[:-1]

    if smoothing_radius_grid_cells is not None:
        mean_saliency_matrices = _smooth_maps(
            saliency_matrices=mean_saliency_matrices,
            smoothing_radius_grid_cells=smoothing_radius_grid_cells)

    num_matrices = len(mean_radar_matrices)
    significance_matrices = [None] * num_matrices

    if monte_carlo_file_name is None:
        for i in range(num_matrices):
            significance_matrices[i] = numpy.full(
                mean_radar_matrices[i].shape, False, dtype=bool)
    else:
        print('Reading Monte Carlo test from: "{0:s}"...'.format(
            monte_carlo_file_name))

        with open(monte_carlo_file_name, 'rb') as this_file_handle:
            monte_carlo_dict = pickle.load(this_file_handle)

        for i in range(num_matrices):
            significance_matrices[i] = (
                monte_carlo_dict[monte_carlo.P_VALUE_MATRICES_KEY][i] <= 0.05)
            significance_matrices[i] = numpy.expand_dims(
                significance_matrices[i], axis=0)

    all_sig_flags = numpy.concatenate(
        [numpy.ravel(a) for a in significance_matrices])
    print('Fraction of significant differences: {0:.4f}'.format(
        numpy.mean(all_sig_flags.astype(float))))

    all_refl_heights_m_agl = training_option_dict[
        trainval_io.RADAR_HEIGHTS_KEY]
    good_flags = numpy.array(
        [h in REFL_HEIGHTS_M_AGL for h in all_refl_heights_m_agl], dtype=bool)
    good_indices = numpy.where(good_flags)[0]

    mean_radar_matrices[0] = mean_radar_matrices[0][..., good_indices, :]
    mean_saliency_matrices[0] = (
        mean_saliency_matrices[0][..., good_indices, :])
    significance_matrices[0] = significance_matrices[0][..., good_indices, :]

    training_option_dict[trainval_io.RADAR_HEIGHTS_KEY] = REFL_HEIGHTS_M_AGL
    training_option_dict[trainval_io.SOUNDING_FIELDS_KEY] = None
    model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY] = training_option_dict

    return (mean_radar_matrices, mean_saliency_matrices, significance_matrices,
            model_metadata_dict)
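
The significance masks here come straight from the Monte Carlo p-values: a grid point is flagged wherever its p-value is at most 0.05, and the mask then gets a leading example axis so it lines up with the composited predictor and saliency matrices. A minimal illustration with a made-up p-value matrix:

import numpy

# Pretend p-value matrix for one input tensor (M-by-N-by-H-by-F, here
# 4-by-4-by-3-by-2), as produced by the Monte Carlo test.
p_value_matrix = numpy.random.rand(4, 4, 3, 2)

# Boolean significance flags at the 0.05 level.
significance_matrix = p_value_matrix <= 0.05

# Add a leading example axis (shape becomes 1-by-4-by-4-by-3-by-2).
significance_matrix = numpy.expand_dims(significance_matrix, axis=0)

print('Fraction of significant differences: {0:.4f}'.format(
    numpy.mean(significance_matrix.astype(float))))
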
Example #5
def _run(input_file_name, colour_map_name, max_colour_value, half_num_contours,
         smoothing_radius_grid_cells, plot_soundings, allow_whitespace,
         plot_panel_names, add_titles, label_colour_bars, colour_bar_length,
         output_dir_name):
    """Plots saliency maps.

    This is effectively the main method.

    :param input_file_name: See documentation at top of file.
    :param colour_map_name: Same.
    :param max_colour_value: Same.
    :param half_num_contours: Same.
    :param smoothing_radius_grid_cells: Same.
    :param plot_soundings: Same.
    :param allow_whitespace: Same.
    :param plot_panel_names: Same.
    :param add_titles: Same.
    :param label_colour_bars: Same.
    :param colour_bar_length: Same.
    :param output_dir_name: Same.
    """

    if max_colour_value <= 0:
        max_colour_value = None
    if smoothing_radius_grid_cells <= 0:
        smoothing_radius_grid_cells = None

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    colour_map_object = pyplot.cm.get_cmap(colour_map_name)
    error_checking.assert_is_geq(half_num_contours, 5)

    print('Reading data from: "{0:s}"...'.format(input_file_name))
    saliency_dict, pmm_flag = saliency_maps.read_file(input_file_name)

    if pmm_flag:
        predictor_matrices = saliency_dict.pop(
            saliency_maps.MEAN_PREDICTOR_MATRICES_KEY)
        saliency_matrices = saliency_dict.pop(
            saliency_maps.MEAN_SALIENCY_MATRICES_KEY)

        full_storm_id_strings = [None]
        storm_times_unix_sec = [None]

        mean_sounding_pressures_pa = saliency_dict[
            saliency_maps.MEAN_SOUNDING_PRESSURES_KEY]
        sounding_pressure_matrix_pa = numpy.reshape(
            mean_sounding_pressures_pa, (1, len(mean_sounding_pressures_pa))
        )

        for i in range(len(predictor_matrices)):
            predictor_matrices[i] = numpy.expand_dims(
                predictor_matrices[i], axis=0
            )
            saliency_matrices[i] = numpy.expand_dims(
                saliency_matrices[i], axis=0
            )
    else:
        predictor_matrices = saliency_dict.pop(
            saliency_maps.PREDICTOR_MATRICES_KEY)
        saliency_matrices = saliency_dict.pop(
            saliency_maps.SALIENCY_MATRICES_KEY)

        full_storm_id_strings = saliency_dict[saliency_maps.FULL_STORM_IDS_KEY]
        storm_times_unix_sec = saliency_dict[saliency_maps.STORM_TIMES_KEY]
        sounding_pressure_matrix_pa = saliency_dict[
            saliency_maps.SOUNDING_PRESSURES_KEY]

    if smoothing_radius_grid_cells is not None:
        saliency_matrices = _smooth_maps(
            saliency_matrices=saliency_matrices,
            smoothing_radius_grid_cells=smoothing_radius_grid_cells)

    model_file_name = saliency_dict[saliency_maps.MODEL_FILE_KEY]
    model_metafile_name = '{0:s}/model_metadata.p'.format(
        os.path.split(model_file_name)[0]
    )

    print('Reading metadata from: "{0:s}"...'.format(model_metafile_name))
    model_metadata_dict = cnn.read_model_metadata(model_metafile_name)
    print(SEPARATOR_STRING)

    training_option_dict = model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY]
    num_radar_matrices = len(predictor_matrices)

    if training_option_dict[trainval_io.SOUNDING_FIELDS_KEY] is None:
        plot_soundings = False
    else:
        num_radar_matrices -= 1

    num_examples = predictor_matrices[0].shape[0]

    for i in range(num_examples):
        this_handle_dict = plot_examples.plot_one_example(
            list_of_predictor_matrices=predictor_matrices,
            model_metadata_dict=model_metadata_dict, pmm_flag=pmm_flag,
            example_index=i, plot_sounding=plot_soundings,
            sounding_pressures_pascals=sounding_pressure_matrix_pa[i, ...],
            allow_whitespace=allow_whitespace,
            plot_panel_names=plot_panel_names, add_titles=add_titles,
            label_colour_bars=label_colour_bars,
            colour_bar_length=colour_bar_length)

        if plot_soundings:
            _plot_sounding_saliency(
                saliency_matrix=saliency_matrices[-1][i, ...],
                colour_map_object=colour_map_object,
                max_colour_value=max_colour_value,
                sounding_figure_object=this_handle_dict[
                    plot_examples.SOUNDING_FIGURE_KEY],
                sounding_axes_object=this_handle_dict[
                    plot_examples.SOUNDING_AXES_KEY],
                sounding_pressures_pascals=sounding_pressure_matrix_pa[i, ...],
                saliency_dict=saliency_dict,
                model_metadata_dict=model_metadata_dict, add_title=add_titles,
                output_dir_name=output_dir_name, pmm_flag=pmm_flag,
                example_index=i)

        these_figure_objects = this_handle_dict[plot_examples.RADAR_FIGURES_KEY]
        these_axes_object_matrices = this_handle_dict[
            plot_examples.RADAR_AXES_KEY]

        for j in range(num_radar_matrices):
            this_num_spatial_dim = len(predictor_matrices[j].shape) - 2

            if this_num_spatial_dim == 3:
                _plot_3d_radar_saliency(
                    saliency_matrix=saliency_matrices[j][i, ...],
                    colour_map_object=colour_map_object,
                    max_colour_value=max_colour_value,
                    half_num_contours=half_num_contours,
                    label_colour_bars=label_colour_bars,
                    colour_bar_length=colour_bar_length,
                    figure_objects=these_figure_objects,
                    axes_object_matrices=these_axes_object_matrices,
                    model_metadata_dict=model_metadata_dict,
                    output_dir_name=output_dir_name,
                    significance_matrix=None,
                    full_storm_id_string=full_storm_id_strings[i],
                    storm_time_unix_sec=storm_times_unix_sec[i]
                )
            else:
                _plot_2d_radar_saliency(
                    saliency_matrix=saliency_matrices[j][i, ...],
                    colour_map_object=colour_map_object,
                    max_colour_value=max_colour_value,
                    half_num_contours=half_num_contours,
                    label_colour_bars=label_colour_bars,
                    colour_bar_length=colour_bar_length,
                    figure_objects=these_figure_objects,
                    axes_object_matrices=these_axes_object_matrices,
                    model_metadata_dict=model_metadata_dict,
                    output_dir_name=output_dir_name,
                    significance_matrix=None,
                    full_storm_id_string=full_storm_id_strings[i],
                    storm_time_unix_sec=storm_times_unix_sec[i]
                )
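
The 3-D/2-D dispatch above infers the number of spatial dimensions from tensor rank: each predictor matrix has one example axis and one channel axis, and everything in between is spatial. A tiny demo with dummy shapes:

import numpy

# Dummy predictor matrices: a 3-D radar tensor (example, row, column, height,
# field) and a 2-D one (example, row, column, channel).
predictor_matrices = [
    numpy.zeros((10, 32, 32, 12, 4)),
    numpy.zeros((10, 32, 32, 8))
]

for this_matrix in predictor_matrices:
    # Subtract the example axis and channel axis from the rank to get the
    # number of spatial dimensions (3 for the first matrix, 2 for the second).
    num_spatial_dim = len(this_matrix.shape) - 2
    print('Rank = {0:d} ... spatial dimensions = {1:d}'.format(
        len(this_matrix.shape), num_spatial_dim))
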
Example #6
def _read_one_composite(saliency_file_name, smoothing_radius_grid_cells,
                        monte_carlo_file_name):
    """Reads saliency map for one composite.

    E = number of examples
    M = number of rows in grid
    N = number of columns in grid
    H = number of heights in grid
    F = number of radar fields

    :param saliency_file_name: Path to input file (will be read by
        `saliency_maps.read_file`).
    :param smoothing_radius_grid_cells: Radius for Gaussian smoother, used only
        for saliency map.
    :param monte_carlo_file_name: Path to Monte Carlo file (will be read by
        `_read_monte_carlo_file`).
    :return: mean_radar_matrix: E-by-M-by-N-by-H-by-F numpy array with mean
        radar fields.
    :return: mean_saliency_matrix: E-by-M-by-N-by-H-by-F numpy array with mean
        saliency fields.
    :return: significance_matrix: E-by-M-by-N-by-H-by-F numpy array of Boolean
        flags.
    :return: model_metadata_dict: Dictionary returned by
        `cnn.read_model_metadata`.
    """

    print('Reading saliency maps from: "{0:s}"...'.format(saliency_file_name))
    saliency_dict = saliency_maps.read_file(saliency_file_name)[0]

    mean_radar_matrix = numpy.expand_dims(
        saliency_dict[saliency_maps.MEAN_PREDICTOR_MATRICES_KEY][0], axis=0)
    mean_saliency_matrix = numpy.expand_dims(
        saliency_dict[saliency_maps.MEAN_SALIENCY_MATRICES_KEY][0], axis=0)

    if smoothing_radius_grid_cells is not None:
        print((
            'Smoothing saliency maps with Gaussian filter (e-folding radius of '
            '{0:.1f} grid cells)...').format(smoothing_radius_grid_cells))

        num_fields = mean_radar_matrix.shape[-1]

        for k in range(num_fields):
            mean_saliency_matrix[0, ..., k] = (
                general_utils.apply_gaussian_filter(
                    input_matrix=mean_saliency_matrix[0, ..., k],
                    e_folding_radius_grid_cells=smoothing_radius_grid_cells))

    model_file_name = saliency_dict[saliency_maps.MODEL_FILE_KEY]
    model_metafile_name = cnn.find_metafile(model_file_name)

    if monte_carlo_file_name is None:
        significance_matrix = numpy.full(
            mean_radar_matrix.shape, False, dtype=bool)
    else:
        print('Reading Monte Carlo test from: "{0:s}"...'.format(
            monte_carlo_file_name))

        with open(monte_carlo_file_name, 'rb') as this_file_handle:
            monte_carlo_dict = pickle.load(this_file_handle)

        significance_matrix = numpy.logical_or(
            monte_carlo_dict[monte_carlo.TRIAL_PMM_MATRICES_KEY][0] <
            monte_carlo_dict[monte_carlo.MIN_MATRICES_KEY][0],
            monte_carlo_dict[monte_carlo.TRIAL_PMM_MATRICES_KEY][0] >
            monte_carlo_dict[monte_carlo.MAX_MATRICES_KEY][0])
        significance_matrix = numpy.expand_dims(significance_matrix, axis=0)

    print('Fraction of significant differences: {0:.4f}'.format(
        numpy.mean(significance_matrix.astype(float))))

    print('Reading CNN metadata from: "{0:s}"...'.format(model_metafile_name))
    model_metadata_dict = cnn.read_model_metadata(model_metafile_name)
    training_option_dict = model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY]

    good_indices = numpy.array([
        numpy.where(
            training_option_dict[trainval_io.RADAR_HEIGHTS_KEY] == h)[0][0]
        for h in RADAR_HEIGHTS_M_AGL
    ], dtype=int)

    mean_radar_matrix = mean_radar_matrix[..., good_indices, :]
    mean_saliency_matrix = mean_saliency_matrix[..., good_indices, :]
    significance_matrix = significance_matrix[..., good_indices, :]

    good_indices = numpy.array([
        training_option_dict[trainval_io.RADAR_FIELDS_KEY].index(f)
        for f in RADAR_FIELD_NAMES
    ], dtype=int)

    mean_radar_matrix = mean_radar_matrix[..., good_indices]
    mean_saliency_matrix = mean_saliency_matrix[..., good_indices]
    significance_matrix = significance_matrix[..., good_indices]

    training_option_dict[trainval_io.RADAR_HEIGHTS_KEY] = RADAR_HEIGHTS_M_AGL
    training_option_dict[trainval_io.RADAR_FIELDS_KEY] = RADAR_FIELD_NAMES
    training_option_dict[trainval_io.SOUNDING_FIELDS_KEY] = None
    model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY] = training_option_dict

    return (mean_radar_matrix, mean_saliency_matrix, significance_matrix,
            model_metadata_dict)
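
In this version the significance flags come from a min/max envelope rather than explicit p-values: a grid point is significant wherever the trial PMM composite falls outside the [min, max] range of the shuffled composites. A toy numpy illustration of that check:

import numpy

# Toy composites on a 3-by-3 grid.
trial_pmm_matrix = numpy.array([
    [0.1, 0.5, 0.9],
    [0.4, 0.2, 0.8],
    [0.7, 0.3, 0.6]
])
min_matrix = numpy.full((3, 3), 0.25)  # lower bound from shuffled composites
max_matrix = numpy.full((3, 3), 0.75)  # upper bound from shuffled composites

# Significant wherever the trial composite leaves the envelope.
significance_matrix = numpy.logical_or(
    trial_pmm_matrix < min_matrix, trial_pmm_matrix > max_matrix)

# Add a leading example axis, as in the function above.
significance_matrix = numpy.expand_dims(significance_matrix, axis=0)
print(significance_matrix.astype(int))
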
Example #7
def _run(actual_file_name, dummy_file_name, smoothing_radius_grid_cells,
         max_pmm_percentile_level, num_iterations, confidence_level,
         output_file_name):
    """Runs Monte Carlo test for saliency maps.

    This is effectively the main method.

    :param actual_file_name: See documentation at top of file.
    :param dummy_file_name: Same.
    :param smoothing_radius_grid_cells: Same.
    :param max_pmm_percentile_level: Same.
    :param num_iterations: Same.
    :param confidence_level: Same.
    :param output_file_name: Same.
    """

    if smoothing_radius_grid_cells <= 0:
        smoothing_radius_grid_cells = None

    # Read saliency maps.
    print('Reading actual saliency maps from: "{0:s}"...'.format(
        actual_file_name))
    actual_saliency_dict = saliency_maps.read_file(actual_file_name)[0]
    actual_saliency_matrices = (
        actual_saliency_dict[saliency_maps.SALIENCY_MATRICES_KEY])

    print(
        'Reading dummy saliency maps from: "{0:s}"...'.format(dummy_file_name))
    dummy_saliency_dict = saliency_maps.read_file(dummy_file_name)[0]
    dummy_saliency_matrices = (
        dummy_saliency_dict[saliency_maps.SALIENCY_MATRICES_KEY])

    # Ensure that the two files contain the same examples.
    assert (actual_saliency_dict[saliency_maps.FULL_STORM_IDS_KEY] ==
            dummy_saliency_dict[saliency_maps.FULL_STORM_IDS_KEY])

    assert numpy.array_equal(
        actual_saliency_dict[saliency_maps.STORM_TIMES_KEY],
        dummy_saliency_dict[saliency_maps.STORM_TIMES_KEY])

    if smoothing_radius_grid_cells is not None:
        actual_saliency_matrices = _smooth_maps(
            saliency_matrices=actual_saliency_matrices,
            smoothing_radius_grid_cells=smoothing_radius_grid_cells)
        dummy_saliency_matrices = _smooth_maps(
            saliency_matrices=dummy_saliency_matrices,
            smoothing_radius_grid_cells=smoothing_radius_grid_cells)

    # Convert saliency from absolute values to percentiles.
    num_matrices = len(actual_saliency_matrices)
    num_examples = actual_saliency_matrices[0].shape[0]

    for j in range(num_matrices):
        for i in range(num_examples):
            this_num_channels = actual_saliency_matrices[j].shape[-1]

            for k in range(this_num_channels):
                this_flat_array = numpy.ravel(
                    actual_saliency_matrices[j][i, ..., k])
                these_flat_ranks = (
                    scipy.stats.rankdata(this_flat_array, method='average') /
                    len(this_flat_array))
                actual_saliency_matrices[j][i, ..., k] = numpy.reshape(
                    these_flat_ranks,
                    actual_saliency_matrices[j][i, ..., k].shape)

                this_flat_array = numpy.ravel(
                    dummy_saliency_matrices[j][i, ..., k])
                these_flat_ranks = (
                    scipy.stats.rankdata(this_flat_array, method='average') /
                    len(this_flat_array))
                dummy_saliency_matrices[j][i, ..., k] = numpy.reshape(
                    these_flat_ranks,
                    dummy_saliency_matrices[j][i, ..., k].shape)

    # Do Monte Carlo test.
    monte_carlo_dict = monte_carlo.run_monte_carlo_test(
        list_of_baseline_matrices=dummy_saliency_matrices,
        list_of_trial_matrices=actual_saliency_matrices,
        max_pmm_percentile_level=max_pmm_percentile_level,
        num_iterations=num_iterations,
        confidence_level=confidence_level)

    print('Writing results of Monte Carlo test to file: "{0:s}"...'.format(
        output_file_name))
    _write_results(monte_carlo_dict=monte_carlo_dict,
                   pickle_file_name=output_file_name)
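
The rank transform in the loop above maps each channel's saliency values to their percentile rank within that channel, so the "actual" and "dummy" maps are compared on a common (0, 1] scale rather than in raw saliency units. A stand-alone version of that step for one 2-D field:

import numpy
import scipy.stats

# One 2-D saliency field (one example, one channel).
saliency_matrix = numpy.random.normal(size=(32, 32))

# Rank every value (ties get averaged ranks), then divide by the number of
# values to get percentile-like scores in (0, 1].
this_flat_array = numpy.ravel(saliency_matrix)
these_flat_ranks = (
    scipy.stats.rankdata(this_flat_array, method='average') /
    len(this_flat_array))
saliency_matrix = numpy.reshape(these_flat_ranks, saliency_matrix.shape)

print(saliency_matrix.min(), saliency_matrix.max())
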