def test_upsample_reflectivity(self):
        """Ensures correct output from upsample_reflectivity."""

        this_radar_matrix = trainval_io.upsample_reflectivity(
            RADAR_MATRIX_ORIG + 0.)

        self.assertTrue(
            numpy.allclose(this_radar_matrix,
                           RADAR_MATRIX_UPSAMPLED,
                           atol=TOLERANCE))
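
# A minimal usage sketch (not part of the original test file) for
# `trainval_io.upsample_reflectivity`.  The import path is assumed, the input
# is assumed to have dimensions E x M x N x H (examples x rows x columns x
# heights), and `upsampling_factor` is assumed to multiply the number of rows
# and columns.  The input values are arbitrary.
import numpy
from gewittergefahr.deep_learning import training_validation_io as trainval_io

dummy_matrix_dbz = numpy.random.uniform(low=0., high=60., size=(1, 16, 16, 12))
upsampled_matrix_dbz = trainval_io.upsample_reflectivity(
    reflectivity_matrix_dbz=dummy_matrix_dbz, upsampling_factor=2)

# If the above assumptions hold, the spatial dimensions double while the other
# axes are unchanged.
assert upsampled_matrix_dbz.shape == (1, 32, 32, 12)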
def _convert_one_file(input_file_name, resolution_factor, output_file_name):
    """Converts examples in one file from GridRad to MYRORSS format.

    :param input_file_name: Path to input file (with GridRad examples).  Will be
        read by `input_examples.read_example_file`.
    :param resolution_factor: See documentation at top of file.
    :param output_file_name: Path to output file (with the same examples but in
        MYRORSS format).  Will be written by
        `input_examples.write_example_file`.
    """

    print('Reading GridRad examples from: "{0:s}"...'.format(input_file_name))
    example_dict = input_examples.read_example_file(
        netcdf_file_name=input_file_name, read_all_target_vars=True)

    refl_heights_m_agl = example_dict[input_examples.RADAR_HEIGHTS_KEY] + 0
    refl_index = example_dict[input_examples.RADAR_FIELDS_KEY].index(
        radar_utils.REFL_NAME)

    # Upsample the 3-D reflectivity field by `resolution_factor`, then restore
    # the trailing channel axis.
    reflectivity_matrix_dbz = trainval_io.upsample_reflectivity(
        reflectivity_matrix_dbz=example_dict[
            input_examples.RADAR_IMAGE_MATRIX_KEY][..., refl_index],
        upsampling_factor=resolution_factor
    )

    reflectivity_matrix_dbz = numpy.expand_dims(
        reflectivity_matrix_dbz, axis=-1)

    # Collapse the 3-D radar images into 2-D composites for the low-level and
    # mid-level vorticity layers.
    example_dict = input_examples.reduce_examples_3d_to_2d(
        example_dict=example_dict,
        list_of_operation_dicts=[
            LL_SHEAR_OPERATION_DICT, ML_SHEAR_OPERATION_DICT
        ]
    )

    field_names = example_dict[input_examples.RADAR_FIELDS_KEY]
    min_heights_m_agl = example_dict[input_examples.MIN_RADAR_HEIGHTS_KEY]

    # Find the channel holding the low-level vorticity composite.
    ll_shear_index = numpy.where(numpy.logical_and(
        numpy.array(field_names) == radar_utils.VORTICITY_NAME,
        min_heights_m_agl ==
        LL_SHEAR_OPERATION_DICT[input_examples.MIN_HEIGHT_KEY]
    ))[0]

    # Azimuthal shear is upsampled by twice the reflectivity factor, since
    # MYRORSS stores azimuthal shear on a finer grid than reflectivity.
    ll_shear_matrix_s01 = trainval_io.upsample_reflectivity(
        reflectivity_matrix_dbz=example_dict[
            input_examples.RADAR_IMAGE_MATRIX_KEY][..., ll_shear_index],
        upsampling_factor=resolution_factor * 2
    )

    # Find the channel holding the mid-level vorticity composite.
    ml_shear_index = numpy.where(numpy.logical_and(
        numpy.array(field_names) == radar_utils.VORTICITY_NAME,
        min_heights_m_agl ==
        ML_SHEAR_OPERATION_DICT[input_examples.MIN_HEIGHT_KEY]
    ))[0]

    ml_shear_matrix_s01 = trainval_io.upsample_reflectivity(
        reflectivity_matrix_dbz=example_dict[
            input_examples.RADAR_IMAGE_MATRIX_KEY][..., ml_shear_index],
        upsampling_factor=resolution_factor * 2
    )

    # Convert the vorticity composites into a two-channel azimuthal-shear
    # matrix (low-level, then mid-level).
    azimuthal_shear_matrix_s01 = VORTICITY_TO_AZ_SHEAR * numpy.concatenate(
        (ll_shear_matrix_s01, ml_shear_matrix_s01), axis=-1
    )

    # Store the converted fields under MYRORSS-style keys.
    example_dict[input_examples.REFL_IMAGE_MATRIX_KEY] = reflectivity_matrix_dbz
    example_dict[
        input_examples.AZ_SHEAR_IMAGE_MATRIX_KEY] = azimuthal_shear_matrix_s01

    example_dict[input_examples.RADAR_HEIGHTS_KEY] = refl_heights_m_agl
    example_dict[input_examples.RADAR_FIELDS_KEY] = [
        radar_utils.LOW_LEVEL_SHEAR_NAME, radar_utils.MID_LEVEL_SHEAR_NAME
    ]
    example_dict[input_examples.ROTATED_GRID_SPACING_KEY] /= resolution_factor

    # Remove keys that are not part of the MYRORSS-format example dictionary.
    example_dict.pop(input_examples.RADAR_IMAGE_MATRIX_KEY, None)
    example_dict.pop(input_examples.MIN_RADAR_HEIGHTS_KEY, None)
    example_dict.pop(input_examples.MAX_RADAR_HEIGHTS_KEY, None)
    example_dict.pop(input_examples.RADAR_LAYER_OPERATION_NAMES_KEY, None)

    print('Writing examples in MYRORSS format to: "{0:s}"...'.format(
        output_file_name
    ))

    input_examples.write_example_file(
        netcdf_file_name=output_file_name, example_dict=example_dict,
        append_to_file=False)
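
# The two layer-operation dicts used above (LL_SHEAR_OPERATION_DICT and
# ML_SHEAR_OPERATION_DICT) are module-level constants defined at the top of the
# original file and not shown in this snippet.  The sketch below shows only
# their general structure: the key names are the same ones used elsewhere in
# this file, but the height bounds (0-2 km AGL for low-level, 3-6 km AGL for
# mid-level) and the operation name 'max' are assumptions, not values taken
# from the original script.
LL_SHEAR_OPERATION_DICT = {
    input_examples.RADAR_FIELD_KEY: radar_utils.VORTICITY_NAME,
    input_examples.MIN_HEIGHT_KEY: 0,         # metres AGL (assumed)
    input_examples.MAX_HEIGHT_KEY: 2000,      # metres AGL (assumed)
    input_examples.OPERATION_NAME_KEY: 'max'  # assumed
}

ML_SHEAR_OPERATION_DICT = {
    input_examples.RADAR_FIELD_KEY: radar_utils.VORTICITY_NAME,
    input_examples.MIN_HEIGHT_KEY: 3000,      # metres AGL (assumed)
    input_examples.MAX_HEIGHT_KEY: 6000,      # metres AGL (assumed)
    input_examples.OPERATION_NAME_KEY: 'max'  # assumed
}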
def _convert_one_file_selected_examples(input_file_name, output_file_name,
                                        full_storm_id_strings,
                                        storm_times_unix_sec, append_to_file):
    """Converts selected examples in one file from MYRORSS to GridRad format.

    E = number of examples

    :param input_file_name: Path to input file (with MYRORSS examples).  Will
        be read by `input_examples.read_specific_examples`.
    :param output_file_name: Path to output file (with the same examples but in
        GridRad format).  Will be written by `input_examples.write_example_file`.
    :param full_storm_id_strings: length-E list of storm IDs.
    :param storm_times_unix_sec: length-E numpy array of storm times.
    :param append_to_file: Boolean flag.  If True, will append new examples to
        output file.  If False, will overwrite output file.
    """

    print('Reading MYRORSS examples from: "{0:s}"...'.format(input_file_name))
    example_dict = input_examples.read_specific_examples(
        netcdf_file_name=input_file_name,
        read_all_target_vars=True,
        full_storm_id_strings=full_storm_id_strings,
        storm_times_unix_sec=storm_times_unix_sec,
        radar_heights_to_keep_m_agl=REFL_HEIGHTS_M_AGL)

    # Add surface reflectivity, then double horizontal resolution.
    reflectivity_matrix_dbz = example_dict[
        input_examples.REFL_IMAGE_MATRIX_KEY][..., 0]

    reflectivity_matrix_dbz = numpy.concatenate(
        (reflectivity_matrix_dbz, reflectivity_matrix_dbz[..., [0]]), axis=-1)

    reflectivity_matrix_dbz = trainval_io.upsample_reflectivity(
        reflectivity_matrix_dbz=reflectivity_matrix_dbz, upsampling_factor=2)

    # Create a pseudo-vorticity matrix by replicating each azimuthal-shear
    # field over the new height levels.
    shear_field_names = example_dict[input_examples.RADAR_FIELDS_KEY]
    ll_shear_index = shear_field_names.index(radar_utils.LOW_LEVEL_SHEAR_NAME)
    ml_shear_index = shear_field_names.index(radar_utils.MID_LEVEL_SHEAR_NAME)

    ll_shear_matrix_s01 = example_dict[
        input_examples.AZ_SHEAR_IMAGE_MATRIX_KEY][..., ll_shear_index]

    ml_shear_matrix_s01 = example_dict[
        input_examples.AZ_SHEAR_IMAGE_MATRIX_KEY][..., ml_shear_index]

    num_radar_heights = len(NEW_RADAR_HEIGHTS_M_AGL)
    these_dimensions = numpy.array(ll_shear_matrix_s01.shape +
                                   (num_radar_heights, ),
                                   dtype=int)
    vorticity_matrix_s01 = numpy.full(these_dimensions, numpy.nan)

    for k in range(num_radar_heights):
        if NEW_RADAR_HEIGHTS_M_AGL[k] > MAX_LL_SHEAR_HEIGHT_M_AGL:
            vorticity_matrix_s01[..., k] = ml_shear_matrix_s01
        else:
            vorticity_matrix_s01[..., k] = ll_shear_matrix_s01

    vorticity_matrix_s01 *= AZ_SHEAR_TO_VORTICITY
    radar_matrix = numpy.stack((reflectivity_matrix_dbz, vorticity_matrix_s01),
                               axis=-1)

    # Store the converted fields under GridRad-style keys.
    example_dict[input_examples.RADAR_IMAGE_MATRIX_KEY] = radar_matrix
    example_dict[input_examples.RADAR_HEIGHTS_KEY] = NEW_RADAR_HEIGHTS_M_AGL
    example_dict[input_examples.RADAR_FIELDS_KEY] = [
        radar_utils.REFL_NAME, radar_utils.VORTICITY_NAME
    ]
    example_dict[input_examples.ROTATED_GRID_SPACING_KEY] *= 0.5

    # Remove keys that are not part of the GridRad-format example dictionary.
    example_dict.pop(input_examples.REFL_IMAGE_MATRIX_KEY, None)
    example_dict.pop(input_examples.AZ_SHEAR_IMAGE_MATRIX_KEY, None)

    print('Writing examples in GridRad format to: "{0:s}"...'.format(
        output_file_name))

    input_examples.write_example_file(netcdf_file_name=output_file_name,
                                      example_dict=example_dict,
                                      append_to_file=append_to_file)
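
# A hypothetical usage sketch (not from the original script) showing how
# `_convert_one_file_selected_examples` might be called.  The file paths, storm
# ID, and storm time below are invented for illustration; in the original
# script they come from command-line arguments and a storm-metadata file.
import numpy

_convert_one_file_selected_examples(
    input_file_name='myrorss_examples.nc',        # hypothetical path
    output_file_name='gridrad_examples.nc',       # hypothetical path
    full_storm_id_strings=['dummy_storm_0001'],   # hypothetical storm ID
    storm_times_unix_sec=numpy.array([1554336000], dtype=int),
    append_to_file=False)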
def _run(activation_file_name, storm_metafile_name, num_examples,
         top_example_dir_name, num_radar_rows, num_radar_columns,
         allow_whitespace, colour_bar_length, output_dir_name):
    """Plots one or more examples (storm objects) for human input.

    This is effectively the main method.

    :param activation_file_name: See documentation at top of file.
    :param storm_metafile_name: Same.
    :param num_examples: Same.
    :param top_example_dir_name: Same.
    :param num_radar_rows: Same.
    :param num_radar_columns: Same.
    :param allow_whitespace: Same.
    :param colour_bar_length: Same.
    :param output_dir_name: Same.
    """

    if num_radar_rows <= 0:
        num_radar_rows = None
    if num_radar_columns <= 0:
        num_radar_columns = None

    if activation_file_name in ['', 'None']:
        activation_file_name = None

    if activation_file_name is None:
        print('Reading data from: "{0:s}"...'.format(storm_metafile_name))
        full_storm_id_strings, storm_times_unix_sec = (
            tracking_io.read_ids_and_times(storm_metafile_name))

        training_option_dict = dict()
        training_option_dict[trainval_io.SOUNDING_FIELDS_KEY] = None
        training_option_dict[trainval_io.SOUNDING_HEIGHTS_KEY] = None

        training_option_dict[trainval_io.NUM_ROWS_KEY] = num_radar_rows
        training_option_dict[trainval_io.NUM_COLUMNS_KEY] = num_radar_columns
        training_option_dict[trainval_io.NORMALIZATION_TYPE_KEY] = None
        training_option_dict[trainval_io.TARGET_NAME_KEY] = DUMMY_TARGET_NAME
        training_option_dict[trainval_io.BINARIZE_TARGET_KEY] = False
        training_option_dict[trainval_io.SAMPLING_FRACTIONS_KEY] = None
        training_option_dict[trainval_io.REFLECTIVITY_MASK_KEY] = None

        model_metadata_dict = {cnn.LAYER_OPERATIONS_KEY: None}
    else:
        print('Reading data from: "{0:s}"...'.format(activation_file_name))
        activation_matrix, activation_metadata_dict = (
            model_activation.read_file(activation_file_name))

        num_model_components = activation_matrix.shape[1]
        if num_model_components > 1:
            error_string = (
                'The file should contain activations for only one model '
                'component, not {0:d}.').format(num_model_components)

            raise TypeError(error_string)

        full_storm_id_strings = activation_metadata_dict[
            model_activation.FULL_IDS_KEY]
        storm_times_unix_sec = activation_metadata_dict[
            model_activation.STORM_TIMES_KEY]

        model_file_name = activation_metadata_dict[
            model_activation.MODEL_FILE_NAME_KEY]
        model_metafile_name = '{0:s}/model_metadata.p'.format(
            os.path.split(model_file_name)[0])

        print('Reading metadata from: "{0:s}"...'.format(model_metafile_name))
        model_metadata_dict = cnn.read_model_metadata(model_metafile_name)

        training_option_dict = model_metadata_dict[
            cnn.TRAINING_OPTION_DICT_KEY]
        training_option_dict[trainval_io.NORMALIZATION_TYPE_KEY] = None
        training_option_dict[trainval_io.SAMPLING_FRACTIONS_KEY] = None
        training_option_dict[trainval_io.REFLECTIVITY_MASK_KEY] = None

    training_option_dict[trainval_io.RADAR_FIELDS_KEY] = SHEAR_FIELD_NAMES
    training_option_dict[trainval_io.RADAR_HEIGHTS_KEY] = REFL_HEIGHTS_M_AGL
    training_option_dict[trainval_io.UPSAMPLE_REFLECTIVITY_KEY] = False
    model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY] = training_option_dict

    if 0 < num_examples < len(full_storm_id_strings):
        full_storm_id_strings = full_storm_id_strings[:num_examples]
        storm_times_unix_sec = storm_times_unix_sec[:num_examples]

    print(SEPARATOR_STRING)
    example_dict = testing_io.read_predictors_specific_examples(
        top_example_dir_name=top_example_dir_name,
        desired_full_id_strings=full_storm_id_strings,
        desired_times_unix_sec=storm_times_unix_sec,
        option_dict=model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY],
        layer_operation_dicts=model_metadata_dict[cnn.LAYER_OPERATIONS_KEY])
    print(SEPARATOR_STRING)

    predictor_matrices = example_dict[testing_io.INPUT_MATRICES_KEY]

    # TODO(thunderhoser): The rest of this code is very HACKY.
    # Upsample the reflectivity field (temporarily dropping, then restoring,
    # the trailing channel axis).
    predictor_matrices[0] = trainval_io.upsample_reflectivity(
        predictor_matrices[0][..., 0])
    predictor_matrices[0] = numpy.expand_dims(predictor_matrices[0], axis=-1)

    # Repackage the predictors as an example dict so that
    # `input_examples.reduce_examples_3d_to_2d` can be reused to collapse the
    # reflectivity field into a 2-D composite.
    example_dict = {
        input_examples.RADAR_FIELDS_KEY: SHEAR_FIELD_NAMES,
        input_examples.REFL_IMAGE_MATRIX_KEY: predictor_matrices[0],
        input_examples.AZ_SHEAR_IMAGE_MATRIX_KEY: predictor_matrices[1],
        input_examples.RADAR_HEIGHTS_KEY: REFL_HEIGHTS_M_AGL
    }

    example_dict = input_examples.reduce_examples_3d_to_2d(
        example_dict=example_dict,
        list_of_operation_dicts=[REFL_LAYER_OPERATION_DICT])

    predictor_matrices = [example_dict[input_examples.RADAR_IMAGE_MATRIX_KEY]]

    # Rebuild layer-operation metadata describing the 2-D composites just
    # created.
    layer_operation_dicts = [
        {
            input_examples.RADAR_FIELD_KEY: f,
            input_examples.MIN_HEIGHT_KEY: h1,
            input_examples.MAX_HEIGHT_KEY: h2,
            input_examples.OPERATION_NAME_KEY: op
        }
        for f, h1, h2, op in zip(
            example_dict[input_examples.RADAR_FIELDS_KEY],
            example_dict[input_examples.MIN_RADAR_HEIGHTS_KEY],
            example_dict[input_examples.MAX_RADAR_HEIGHTS_KEY],
            example_dict[input_examples.RADAR_LAYER_OPERATION_NAMES_KEY]
        )
    ]

    model_metadata_dict[cnn.LAYER_OPERATIONS_KEY] = layer_operation_dicts

    figure_file_names = plot_examples.plot_examples(
        list_of_predictor_matrices=predictor_matrices,
        model_metadata_dict=model_metadata_dict,
        pmm_flag=False,
        output_dir_name=output_dir_name,
        plot_soundings=False,
        allow_whitespace=allow_whitespace,
        plot_panel_names=False,
        add_titles=False,
        label_colour_bars=True,
        colour_bar_length=colour_bar_length,
        colour_bar_font_size=COLOUR_BAR_FONT_SIZE,
        figure_resolution_dpi=FIGURE_RESOLUTION_DPI,
        refl_opacity=REFL_OPACITY,
        plot_grid_lines=False,
        full_storm_id_strings=full_storm_id_strings,
        storm_times_unix_sec=storm_times_unix_sec)

    for this_file_name in figure_file_names:
        print('Resizing image to {0:d} pixels: "{1:s}"...'.format(
            FIGURE_SIZE_PIXELS, this_file_name))

        imagemagick_utils.resize_image(input_file_name=this_file_name,
                                       output_file_name=this_file_name,
                                       output_size_pixels=FIGURE_SIZE_PIXELS)
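
# `REFL_LAYER_OPERATION_DICT`, used above with `reduce_examples_3d_to_2d`, is a
# module-level constant that is not shown in this snippet.  The sketch below is
# an assumption about its structure only: the keys are the same ones used when
# rebuilding `layer_operation_dicts` above, while the height bounds and the
# operation name 'max' are hypothetical, not taken from the original script.
REFL_LAYER_OPERATION_DICT = {
    input_examples.RADAR_FIELD_KEY: radar_utils.REFL_NAME,
    input_examples.MIN_HEIGHT_KEY: 1000,      # metres AGL (assumed)
    input_examples.MAX_HEIGHT_KEY: 12000,     # metres AGL (assumed)
    input_examples.OPERATION_NAME_KEY: 'max'  # assumed
}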