def test_denormalize_radar_images_5d_z(self):
    """Ensures correct output from denormalize_radar_images.

    In this case, the input matrix is 5-D and normalization type is z-score.
    """

    # Deep-copy the fixture so denormalization cannot mutate it for other
    # tests.
    denorm_matrix = dl_utils.denormalize_radar_images(
        radar_image_matrix=copy.deepcopy(RADAR_MATRIX_5D_Z_SCORES),
        field_names=RADAR_FIELD_NAMES,
        normalization_type_string=dl_utils.Z_NORMALIZATION_TYPE_STRING,
        normalization_param_file_name=None, test_mode=True,
        normalization_table=RADAR_NORMALIZATION_TABLE)

    matrices_match = numpy.allclose(
        denorm_matrix, RADAR_MATRIX_5D_UNNORMALIZED, atol=TOLERANCE,
        equal_nan=True)
    self.assertTrue(matrices_match)
def test_denormalize_radar_images_4d_minmax(self):
    """Ensures correct output from denormalize_radar_images.

    In this case, the input matrix is 4-D and normalization type is minmax.
    """

    # Deep-copy the fixture so denormalization cannot mutate it for other
    # tests.
    denorm_matrix = dl_utils.denormalize_radar_images(
        radar_image_matrix=copy.deepcopy(RADAR_MATRIX_4D_MINMAX),
        field_names=RADAR_FIELD_NAMES,
        normalization_type_string=dl_utils.MINMAX_NORMALIZATION_TYPE_STRING,
        normalization_param_file_name=None, test_mode=True,
        min_normalized_value=MIN_NORMALIZED_VALUE,
        max_normalized_value=MAX_NORMALIZED_VALUE,
        normalization_table=RADAR_NORMALIZATION_TABLE)

    matrices_match = numpy.allclose(
        denorm_matrix, RADAR_MATRIX_4D_UNNORMALIZED, atol=TOLERANCE,
        equal_nan=True)
    self.assertTrue(matrices_match)
def denormalize_data(list_of_input_matrices, model_metadata_dict):
    """Denormalizes input data for a Keras model.

    E = number of examples (storm objects)
    H = number of height levels per sounding

    :param list_of_input_matrices: length-T list of input matrices (numpy
        arrays), where T = number of input tensors to the model.
    :param model_metadata_dict: Dictionary with metadata for the relevant
        model, created by `cnn.read_model_metadata`.
    :return: list_of_input_matrices: Denormalized version of input (same
        dimensions).
    """

    training_option_dict = model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY]

    # These keyword arguments are identical for every denormalization call
    # below, so build them once.
    common_kwargs = {
        'normalization_type_string':
            training_option_dict[trainval_io.NORMALIZATION_TYPE_KEY],
        'normalization_param_file_name':
            training_option_dict[trainval_io.NORMALIZATION_FILE_KEY],
        'min_normalized_value':
            training_option_dict[trainval_io.MIN_NORMALIZED_VALUE_KEY],
        'max_normalized_value':
            training_option_dict[trainval_io.MAX_NORMALIZED_VALUE_KEY]
    }

    if model_metadata_dict[cnn.CONV_2D3D_KEY]:
        # First matrix holds reflectivity only; second holds the other
        # radar fields.
        list_of_input_matrices[0] = dl_utils.denormalize_radar_images(
            radar_image_matrix=list_of_input_matrices[0],
            field_names=[radar_utils.REFL_NAME], **common_kwargs)

        list_of_input_matrices[1] = dl_utils.denormalize_radar_images(
            radar_image_matrix=list_of_input_matrices[1],
            field_names=training_option_dict[trainval_io.RADAR_FIELDS_KEY],
            **common_kwargs)
    else:
        layer_operation_dicts = model_metadata_dict[cnn.LAYER_OPERATIONS_KEY]

        # Field names come either straight from the training options or from
        # the layer operations, when the latter are defined.
        if layer_operation_dicts is None:
            radar_field_names = training_option_dict[
                trainval_io.RADAR_FIELDS_KEY]
        else:
            radar_field_names = [
                d[input_examples.RADAR_FIELD_KEY]
                for d in layer_operation_dicts
            ]

        list_of_input_matrices[0] = dl_utils.denormalize_radar_images(
            radar_image_matrix=list_of_input_matrices[0],
            field_names=radar_field_names, **common_kwargs)

    # Soundings, when present, are always the last input matrix.
    if training_option_dict[trainval_io.SOUNDING_FIELDS_KEY] is not None:
        list_of_input_matrices[-1] = dl_utils.denormalize_soundings(
            sounding_matrix=list_of_input_matrices[-1],
            field_names=training_option_dict[trainval_io.SOUNDING_FIELDS_KEY],
            **common_kwargs)

    return list_of_input_matrices
def _run(example_file_name, example_indices, num_radar_rows, num_radar_columns,
         normalization_file_name, output_dir_name):
    """Plots data augmentation.

    This is effectively the main method.

    :param example_file_name: See documentation at top of file.
    :param example_indices: Same.
    :param num_radar_rows: Same.
    :param num_radar_columns: Same.
    :param normalization_file_name: Same.
    :param output_dir_name: Same.
    """

    # Non-positive dimensions mean "keep all rows/columns".
    num_radar_rows = None if num_radar_rows <= 0 else num_radar_rows
    num_radar_columns = None if num_radar_columns <= 0 else num_radar_columns

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    print('Reading data from: "{0:s}"...'.format(example_file_name))
    example_dict = input_examples.read_example_file(
        netcdf_file_name=example_file_name, read_all_target_vars=True,
        include_soundings=False, num_rows_to_keep=num_radar_rows,
        num_columns_to_keep=num_radar_columns,
        radar_field_names_to_keep=[RADAR_FIELD_NAME],
        radar_heights_to_keep_m_agl=numpy.array(
            [RADAR_HEIGHT_M_AGL], dtype=int))

    # 2-D/3-D examples store reflectivity under a different key.
    if input_examples.REFL_IMAGE_MATRIX_KEY in example_dict:
        radar_matrix = example_dict[input_examples.REFL_IMAGE_MATRIX_KEY]
    else:
        radar_matrix = example_dict[input_examples.RADAR_IMAGE_MATRIX_KEY]

    error_checking.assert_is_geq_numpy_array(example_indices, 0)
    error_checking.assert_is_less_than_numpy_array(
        example_indices, radar_matrix.shape[0])

    radar_matrix = radar_matrix[example_indices, ...]
    full_storm_id_strings = [
        example_dict[input_examples.FULL_IDS_KEY][k] for k in example_indices
    ]
    storm_times_unix_sec = example_dict[
        input_examples.STORM_TIMES_KEY][example_indices]

    # Augmentation operates on normalized images.
    radar_matrix = dl_utils.normalize_radar_images(
        radar_image_matrix=radar_matrix, field_names=[RADAR_FIELD_NAME],
        normalization_type_string=NORMALIZATION_TYPE_STRING,
        normalization_param_file_name=normalization_file_name)

    num_examples = radar_matrix.shape[0]
    dummy_target_values = numpy.full(num_examples, 0, dtype=int)

    radar_matrix = trainval_io._augment_radar_images(
        list_of_predictor_matrices=[radar_matrix],
        target_array=dummy_target_values,
        x_translations_pixels=X_TRANSLATIONS_PX,
        y_translations_pixels=Y_TRANSLATIONS_PX,
        ccw_rotation_angles_deg=CCW_ROTATION_ANGLES_DEG,
        noise_standard_deviation=NOISE_STANDARD_DEVIATION, num_noisings=1,
        flip_in_x=False, flip_in_y=False
    )[0][0]

    radar_matrix = dl_utils.denormalize_radar_images(
        radar_image_matrix=radar_matrix, field_names=[RADAR_FIELD_NAME],
        normalization_type_string=NORMALIZATION_TYPE_STRING,
        normalization_param_file_name=normalization_file_name)

    # Augmentation output is four stacked chunks of `num_examples` each:
    # original, translated, rotated, noised -- in that order.
    orig_radar_matrix = radar_matrix[:num_examples, ...]
    translated_radar_matrix = radar_matrix[num_examples:(2 * num_examples), ...]
    rotated_radar_matrix = radar_matrix[
        (2 * num_examples):(3 * num_examples), ...]
    noised_radar_matrix = radar_matrix[(3 * num_examples):, ...]

    for i in range(num_examples):
        _plot_one_example(
            orig_radar_matrix=orig_radar_matrix[i, ...],
            translated_radar_matrix=translated_radar_matrix[i, ...],
            rotated_radar_matrix=rotated_radar_matrix[i, ...],
            noised_radar_matrix=noised_radar_matrix[i, ...],
            output_dir_name=output_dir_name,
            full_storm_id_string=full_storm_id_strings[i],
            storm_time_unix_sec=storm_times_unix_sec[i])