Example #1
def create_latlng_grid(min_latitude_deg, max_latitude_deg,
                       latitude_spacing_deg, min_longitude_deg,
                       max_longitude_deg, longitude_spacing_deg):
    """Creates lat-long grid.

    M = number of rows in grid
    N = number of columns in grid

    :param min_latitude_deg: Minimum latitude (deg N) in grid.
    :param max_latitude_deg: Max latitude (deg N) in grid.
    :param latitude_spacing_deg: Spacing (deg N) between grid points in adjacent
        rows.
    :param min_longitude_deg: Minimum longitude (deg E) in grid.
    :param max_longitude_deg: Max longitude (deg E) in grid.
    :param longitude_spacing_deg: Spacing (deg E) between grid points in
        adjacent columns.
    :return: grid_point_latitudes_deg: length-M numpy array with latitudes
        (deg N) of grid points.
    :return: grid_point_longitudes_deg: length-N numpy array with longitudes
        (deg E) of grid points.
    """

    # TODO(thunderhoser): Make this handle wrap-around issues.

    min_longitude_deg = longitude_conv.convert_lng_positive_in_west(
        min_longitude_deg)
    max_longitude_deg = longitude_conv.convert_lng_positive_in_west(
        max_longitude_deg)

    min_latitude_deg = number_rounding.floor_to_nearest(
        min_latitude_deg, latitude_spacing_deg)
    max_latitude_deg = number_rounding.ceiling_to_nearest(
        max_latitude_deg, latitude_spacing_deg)
    min_longitude_deg = number_rounding.floor_to_nearest(
        min_longitude_deg, longitude_spacing_deg)
    max_longitude_deg = number_rounding.ceiling_to_nearest(
        max_longitude_deg, longitude_spacing_deg)

    num_grid_rows = 1 + int(
        numpy.round(
            (max_latitude_deg - min_latitude_deg) / latitude_spacing_deg))
    num_grid_columns = 1 + int(
        numpy.round(
            (max_longitude_deg - min_longitude_deg) / longitude_spacing_deg))

    return grids.get_latlng_grid_points(min_latitude_deg=min_latitude_deg,
                                        min_longitude_deg=min_longitude_deg,
                                        lat_spacing_deg=latitude_spacing_deg,
                                        lng_spacing_deg=longitude_spacing_deg,
                                        num_rows=num_grid_rows,
                                        num_columns=num_grid_columns)
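
Judging from the calls on this page, `grids.get_latlng_grid_points` returns two evenly spaced 1-D coordinate arrays: length-M latitudes and length-N longitudes. The sketch below is a hypothetical stand-in built from that assumption (it is not the library's actual implementation), just to make the return values concrete.

import numpy

def _latlng_grid_points_sketch(min_latitude_deg, min_longitude_deg,
                               lat_spacing_deg, lng_spacing_deg,
                               num_rows, num_columns):
    """Hypothetical stand-in for `grids.get_latlng_grid_points`."""

    grid_point_latitudes_deg = (
        min_latitude_deg + lat_spacing_deg * numpy.arange(num_rows))
    grid_point_longitudes_deg = (
        min_longitude_deg + lng_spacing_deg * numpy.arange(num_columns))
    return grid_point_latitudes_deg, grid_point_longitudes_deg

# Example: a 0.01-deg grid with 5 rows and 4 columns, starting at 35 N, 262 E.
these_latitudes_deg, these_longitudes_deg = _latlng_grid_points_sketch(
    min_latitude_deg=35., min_longitude_deg=262.,
    lat_spacing_deg=0.01, lng_spacing_deg=0.01, num_rows=5, num_columns=4)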
Example #2
def read_data_from_full_grid_file(netcdf_file_name, metadata_dict,
                                  raise_error_if_fails=True):
    """Reads full radar grid from raw (either MYRORSS or MRMS) file.

    This file should contain one radar field at one height and one time step.

    M = number of rows (unique grid-point latitudes)
    N = number of columns (unique grid-point longitudes)

    :param netcdf_file_name: Path to input file.
    :param metadata_dict: Dictionary with metadata for NetCDF file, created by
        read_metadata_from_raw_file.
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be
        opened, this method will raise an error.  If False and file cannot be
        opened, this method will return None for all output variables.
    :return: field_matrix: M-by-N numpy array with values of radar field.
    :return: unique_grid_point_lat_deg: length-M numpy array of grid-point
        latitudes (deg N).  If array is increasing (decreasing), latitude
        increases (decreases) while traveling down the columns of field_matrix.
    :return: unique_grid_point_lng_deg: length-N numpy array of grid-point
        longitudes (deg E).  If array is increasing (decreasing), longitude
        increases (decreases) while traveling right across the rows of
        field_matrix.
    """

    error_checking.assert_file_exists(netcdf_file_name)

    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)
    if netcdf_dataset is None:
        return None, None, None

    # Read the field into memory before closing the dataset; a netCDF4
    # variable cannot be read after its file is closed.
    field_matrix = netcdf_dataset.variables[
        metadata_dict[FIELD_NAME_COLUMN_ORIG]][:]
    netcdf_dataset.close()

    min_latitude_deg = metadata_dict[NW_GRID_POINT_LAT_COLUMN] - (
        metadata_dict[LAT_SPACING_COLUMN] * (metadata_dict[NUM_LAT_COLUMN] - 1))
    unique_grid_point_lat_deg, unique_grid_point_lng_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=min_latitude_deg,
            min_longitude_deg=metadata_dict[NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[LNG_SPACING_COLUMN],
            num_rows=metadata_dict[NUM_LAT_COLUMN],
            num_columns=metadata_dict[NUM_LNG_COLUMN]))

    field_matrix = _remove_sentinels_from_full_grid(
        field_matrix, metadata_dict[SENTINEL_VALUE_COLUMN])
    return (numpy.flipud(field_matrix), unique_grid_point_lat_deg[::-1],
            unique_grid_point_lng_deg)
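
The minimum latitude above comes from the northwest grid-point latitude stored in the metadata: MYRORSS/MRMS grids are indexed from the northwest corner, so the southernmost latitude is the NW latitude minus the spacing times (number of rows minus one). A quick check with hypothetical numbers:

# Hypothetical values, only to illustrate the formula used above.
nw_grid_point_lat_deg = 55.
lat_spacing_deg = 0.01
num_lat = 3501

min_latitude_deg = nw_grid_point_lat_deg - lat_spacing_deg * (num_lat - 1)
# 55. - 0.01 * 3500 = 20. deg N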
Example #3
def read_data_from_full_grid_file(netcdf_file_name,
                                  metadata_dict,
                                  raise_error_if_fails=True):
    """Reads full radar grid from raw (either MYRORSS or MRMS) file.

    This file should contain one radar field at one height and valid time.

    :param netcdf_file_name: Path to input file.
    :param metadata_dict: Dictionary created by `read_metadata_from_raw_file`.
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be read,
        this method will raise an error.  If False and file cannot be read, will
        return None for all output vars.
    :return: field_matrix: M-by-N numpy array with radar field.  Latitude
        increases while moving up each column, and longitude increases while
        moving right along each row.
    :return: grid_point_latitudes_deg: length-M numpy array of grid-point
        latitudes (deg N).  This array is monotonically decreasing.
    :return: grid_point_longitudes_deg: length-N numpy array of grid-point
        longitudes (deg E).  This array is monotonically increasing.
    """

    error_checking.assert_file_exists(netcdf_file_name)
    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)
    if netcdf_dataset is None:
        return None, None, None

    # Read the field into memory before closing the dataset; a netCDF4
    # variable cannot be read after its file is closed.
    field_matrix = netcdf_dataset.variables[
        metadata_dict[FIELD_NAME_COLUMN_ORIG]][:]
    netcdf_dataset.close()

    min_latitude_deg = metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN] - (
        metadata_dict[radar_utils.LAT_SPACING_COLUMN] *
        (metadata_dict[radar_utils.NUM_LAT_COLUMN] - 1))
    grid_point_latitudes_deg, grid_point_longitudes_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=min_latitude_deg,
            min_longitude_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN],
            num_rows=metadata_dict[radar_utils.NUM_LAT_COLUMN],
            num_columns=metadata_dict[radar_utils.NUM_LNG_COLUMN]))

    field_matrix = _remove_sentinels_from_full_grid(
        field_matrix, metadata_dict[radar_utils.SENTINEL_VALUE_COLUMN])
    return (numpy.flipud(field_matrix), grid_point_latitudes_deg[::-1],
            grid_point_longitudes_deg)
Example #4
def sparse_to_full_grid(sparse_grid_table,
                        metadata_dict,
                        ignore_if_below=None):
    """Converts data from sparse to full grid (public wrapper for _convert).

    M = number of rows (unique grid-point latitudes)
    N = number of columns (unique grid-point longitudes)

    :param sparse_grid_table: pandas DataFrame created by
        `myrorss_and_mrms_io.read_data_from_sparse_grid_file`.
    :param metadata_dict: Dictionary created by
        `myrorss_and_mrms_io.read_metadata_from_raw_file`.
    :param ignore_if_below: This method will ignore radar values <
        `ignore_if_below`.  If None, this method will consider all values.
    :return: full_matrix: M-by-N numpy array of radar values.  Latitude
        decreases down each column, and longitude increases to the right along
        each row.
    :return: grid_point_latitudes_deg: length-M numpy array of grid-point
        latitudes (deg N), sorted in descending order.
    :return: grid_point_longitudes_deg: length-N numpy array of grid-point
        longitudes (deg E), sorted in ascending order.
    """

    min_latitude_deg = (metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN] -
                        (metadata_dict[radar_utils.LAT_SPACING_COLUMN] *
                         (metadata_dict[radar_utils.NUM_LAT_COLUMN] - 1)))

    unique_grid_point_lat_deg, unique_grid_point_lng_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=min_latitude_deg,
            min_longitude_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN],
            num_rows=metadata_dict[radar_utils.NUM_LAT_COLUMN],
            num_columns=metadata_dict[radar_utils.NUM_LNG_COLUMN]))

    full_matrix = _convert(
        sparse_grid_table=sparse_grid_table,
        field_name=metadata_dict[radar_utils.FIELD_NAME_COLUMN],
        num_grid_rows=metadata_dict[radar_utils.NUM_LAT_COLUMN],
        num_grid_columns=metadata_dict[radar_utils.NUM_LNG_COLUMN],
        ignore_if_below=ignore_if_below)

    return (full_matrix, unique_grid_point_lat_deg[::-1],
            unique_grid_point_lng_deg)
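
For intuition, the core of a sparse-to-full conversion is scattering (row, column, value) entries into an M-by-N array that is otherwise NaN. The toy sketch below uses made-up data; the actual `_convert` works from the pandas table produced by `myrorss_and_mrms_io.read_data_from_sparse_grid_file` and also applies the `ignore_if_below` threshold.

import numpy

# Toy sparse representation: one (row, column, value) triple per filled pixel.
rows = numpy.array([0, 1, 3], dtype=int)
columns = numpy.array([2, 0, 1], dtype=int)
values = numpy.array([35.5, 40., 52.5])

num_grid_rows = 4
num_grid_columns = 4

full_matrix = numpy.full((num_grid_rows, num_grid_columns), numpy.nan)
full_matrix[rows, columns] = values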
Example #5
    def test_get_latlng_grid_points(self):
        """Ensures correct output from get_latlng_grid_points."""

        these_latitudes_deg, these_longitudes_deg = (
            grids.get_latlng_grid_points(min_latitude_deg=MIN_LATITUDE_DEG,
                                         min_longitude_deg=MIN_LONGITUDE_DEG,
                                         lat_spacing_deg=LATITUDE_SPACING_DEG,
                                         lng_spacing_deg=LONGITUDE_SPACING_DEG,
                                         num_rows=NUM_LATLNG_ROWS,
                                         num_columns=NUM_LATLNG_COLUMNS))

        self.assertTrue(
            numpy.allclose(these_latitudes_deg,
                           POINT_LATITUDES_DEG,
                           atol=TOLERANCE))
        self.assertTrue(
            numpy.allclose(these_longitudes_deg,
                           POINT_LONGITUDES_DEG,
                           atol=TOLERANCE))
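
The constants in this test (MIN_LATITUDE_DEG, POINT_LATITUDES_DEG, and so on) are defined at module level in the test file. They might look something like the following; all values here are hypothetical and chosen only so the expected arrays are consistent with the grid size and spacing.

import numpy

TOLERANCE = 1e-6

MIN_LATITUDE_DEG = 50.
MIN_LONGITUDE_DEG = 240.
LATITUDE_SPACING_DEG = 0.5
LONGITUDE_SPACING_DEG = 1.
NUM_LATLNG_ROWS = 4
NUM_LATLNG_COLUMNS = 3

POINT_LATITUDES_DEG = numpy.array([50., 50.5, 51., 51.5])
POINT_LONGITUDES_DEG = numpy.array([240., 241., 242.])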
Example #6
def write_classifications(convective_flag_matrix, grid_metadata_dict,
                          valid_time_unix_sec, option_dict, netcdf_file_name):
    """Writes echo classifications to NetCDF file.

    :param convective_flag_matrix: M-by-N numpy array of Boolean flags (True
        if convective, False if not).
    :param grid_metadata_dict: See doc for `find_convective_pixels`.
    :param valid_time_unix_sec: Same.
    :param option_dict: Same.
    :param netcdf_file_name: Path to output file.
    """

    # Error-checking.
    error_checking.assert_is_boolean_numpy_array(convective_flag_matrix)
    error_checking.assert_is_numpy_array(convective_flag_matrix,
                                         num_dimensions=2)
    error_checking.assert_is_integer(valid_time_unix_sec)

    option_dict = _check_input_args(option_dict)

    peakedness_neigh_metres = option_dict[PEAKEDNESS_NEIGH_KEY]
    max_peakedness_height_m_asl = option_dict[MAX_PEAKEDNESS_HEIGHT_KEY]
    min_height_fraction_for_peakedness = option_dict[MIN_HEIGHT_FRACTION_KEY]
    halve_resolution_for_peakedness = option_dict[HALVE_RESOLUTION_KEY]
    min_echo_top_m_asl = option_dict[MIN_ECHO_TOP_KEY]
    echo_top_level_dbz = option_dict[ECHO_TOP_LEVEL_KEY]
    min_size_pixels = option_dict[MIN_SIZE_KEY]
    min_composite_refl_criterion1_dbz = (
        option_dict[MIN_COMPOSITE_REFL_CRITERION1_KEY])
    min_composite_refl_criterion5_dbz = (
        option_dict[MIN_COMPOSITE_REFL_CRITERION5_KEY])
    min_composite_refl_aml_dbz = option_dict[MIN_COMPOSITE_REFL_AML_KEY]

    if min_composite_refl_criterion1_dbz is None:
        min_composite_refl_criterion1_dbz = -1.

    file_system_utils.mkdir_recursive_if_necessary(file_name=netcdf_file_name)
    netcdf_dataset = netCDF4.Dataset(netcdf_file_name,
                                     'w',
                                     format='NETCDF3_64BIT_OFFSET')

    netcdf_dataset.setncattr(PEAKEDNESS_NEIGH_KEY, peakedness_neigh_metres)
    netcdf_dataset.setncattr(MAX_PEAKEDNESS_HEIGHT_KEY,
                             max_peakedness_height_m_asl)
    netcdf_dataset.setncattr(MIN_HEIGHT_FRACTION_KEY,
                             min_height_fraction_for_peakedness)
    netcdf_dataset.setncattr(HALVE_RESOLUTION_KEY,
                             int(halve_resolution_for_peakedness))
    netcdf_dataset.setncattr(MIN_ECHO_TOP_KEY, min_echo_top_m_asl)
    netcdf_dataset.setncattr(ECHO_TOP_LEVEL_KEY, echo_top_level_dbz)
    netcdf_dataset.setncattr(MIN_SIZE_KEY, min_size_pixels)
    netcdf_dataset.setncattr(MIN_COMPOSITE_REFL_CRITERION1_KEY,
                             min_composite_refl_criterion1_dbz)
    netcdf_dataset.setncattr(MIN_COMPOSITE_REFL_CRITERION5_KEY,
                             min_composite_refl_criterion5_dbz)
    netcdf_dataset.setncattr(MIN_COMPOSITE_REFL_AML_KEY,
                             min_composite_refl_aml_dbz)
    netcdf_dataset.setncattr(VALID_TIME_KEY, valid_time_unix_sec)

    netcdf_dataset.createDimension(ROW_DIMENSION_KEY,
                                   convective_flag_matrix.shape[0])
    netcdf_dataset.createDimension(COLUMN_DIMENSION_KEY,
                                   convective_flag_matrix.shape[1])

    grid_point_latitudes_deg, grid_point_longitudes_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=grid_metadata_dict[MIN_LATITUDE_KEY],
            min_longitude_deg=grid_metadata_dict[MIN_LONGITUDE_KEY],
            lat_spacing_deg=grid_metadata_dict[LATITUDE_SPACING_KEY],
            lng_spacing_deg=grid_metadata_dict[LONGITUDE_SPACING_KEY],
            num_rows=convective_flag_matrix.shape[0],
            num_columns=convective_flag_matrix.shape[1]))

    netcdf_dataset.createVariable(LATITUDES_KEY,
                                  datatype=numpy.float32,
                                  dimensions=ROW_DIMENSION_KEY)
    netcdf_dataset.variables[LATITUDES_KEY][:] = grid_point_latitudes_deg

    netcdf_dataset.createVariable(LONGITUDES_KEY,
                                  datatype=numpy.float32,
                                  dimensions=COLUMN_DIMENSION_KEY)
    netcdf_dataset.variables[LONGITUDES_KEY][:] = grid_point_longitudes_deg

    netcdf_dataset.createVariable(FLAG_MATRIX_KEY,
                                  datatype=numpy.int32,
                                  dimensions=(ROW_DIMENSION_KEY,
                                              COLUMN_DIMENSION_KEY))
    netcdf_dataset.variables[FLAG_MATRIX_KEY][:] = convective_flag_matrix

    netcdf_dataset.close()
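
A file written this way can be read back with plain `netCDF4`. A minimal sketch (the file path is hypothetical; LATITUDES_KEY, LONGITUDES_KEY, FLAG_MATRIX_KEY, and VALID_TIME_KEY are the same module-level constants used in write_classifications above):

import netCDF4
import numpy

dataset = netCDF4.Dataset('echo_classification.nc')  # hypothetical path

latitudes_deg = numpy.array(dataset.variables[LATITUDES_KEY][:])
longitudes_deg = numpy.array(dataset.variables[LONGITUDES_KEY][:])
convective_flag_matrix = numpy.array(
    dataset.variables[FLAG_MATRIX_KEY][:], dtype=bool)
valid_time_unix_sec = int(dataset.getncattr(VALID_TIME_KEY))

dataset.close()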
Example #7
def find_convective_pixels(reflectivity_matrix_dbz, grid_metadata_dict,
                           valid_time_unix_sec, option_dict):
    """Classifies pixels (horiz grid points) as convective or non-convective.

    :param reflectivity_matrix_dbz: M-by-N-by-H numpy array of reflectivity
        values.  Latitude should increase along the first axis; longitude should
        increase along the second axis; height should increase along the third
        axis.  MAKE SURE NOT TO FLIP YOUR LATITUDES.

    :param grid_metadata_dict: Dictionary with the following keys.
    grid_metadata_dict['min_grid_point_latitude_deg']: Minimum latitude (deg N)
        over all grid points.
    grid_metadata_dict['latitude_spacing_deg']: Spacing (deg N) between grid
        points in adjacent rows.
    grid_metadata_dict['min_grid_point_longitude_deg']: Minimum longitude
        (deg E) over all grid points.
    grid_metadata_dict['longitude_spacing_deg']: Spacing (deg E) between grid
        points in adjacent columns.
    grid_metadata_dict['grid_point_heights_m_asl']: length-H numpy array of
        heights (metres above sea level) at grid points.

    :param valid_time_unix_sec: Valid time.

    :param option_dict: Dictionary with the following keys.
    option_dict['peakedness_neigh_metres']: Neighbourhood radius for peakedness
        calculations (metres), used for criterion 1.
    option_dict['max_peakedness_height_m_asl']: Max height (metres above sea
        level) for peakedness calculations, used in criterion 1.
    option_dict['min_height_fraction_for_peakedness']: Minimum fraction of
        heights that exceed peakedness threshold, used in criterion 1.  At each
        horizontal location, at least this fraction of heights must exceed the
        threshold.
    option_dict['halve_resolution_for_peakedness']: Boolean flag.  If True,
        horizontal grid resolution will be halved for peakedness calculations.
    option_dict['min_echo_top_m_asl']: Minimum echo top (metres above sea
        level), used for criterion 3.
    option_dict['echo_top_level_dbz']: Critical reflectivity (used to compute
        echo top for criterion 3).
    option_dict['min_size_pixels']: Minimum connected-region size (for
        criterion 4).
    option_dict['min_composite_refl_criterion1_dbz']: Minimum composite
        (column-max) reflectivity for criterion 1.  This may be None.
    option_dict['min_composite_refl_criterion5_dbz']: Minimum composite
        reflectivity for criterion 5.
    option_dict['min_composite_refl_aml_dbz']: Minimum composite reflectivity
        above melting level, used for criterion 2.

    :return: convective_flag_matrix: M-by-N numpy array of Boolean flags (True
        if convective, False if not).
    :return: option_dict: Same as input, except some values may have been
        replaced by defaults.
    """

    # Error-checking.
    error_checking.assert_is_numpy_array(reflectivity_matrix_dbz,
                                         num_dimensions=3)

    option_dict = _check_input_args(option_dict)

    peakedness_neigh_metres = option_dict[PEAKEDNESS_NEIGH_KEY]
    max_peakedness_height_m_asl = option_dict[MAX_PEAKEDNESS_HEIGHT_KEY]
    min_height_fraction_for_peakedness = option_dict[MIN_HEIGHT_FRACTION_KEY]
    halve_resolution_for_peakedness = option_dict[HALVE_RESOLUTION_KEY]
    min_echo_top_m_asl = option_dict[MIN_ECHO_TOP_KEY]
    echo_top_level_dbz = option_dict[ECHO_TOP_LEVEL_KEY]
    min_size_pixels = option_dict[MIN_SIZE_KEY]
    min_composite_refl_criterion1_dbz = (
        option_dict[MIN_COMPOSITE_REFL_CRITERION1_KEY])
    min_composite_refl_criterion5_dbz = (
        option_dict[MIN_COMPOSITE_REFL_CRITERION5_KEY])
    min_composite_refl_aml_dbz = option_dict[MIN_COMPOSITE_REFL_AML_KEY]

    grid_point_heights_m_asl = numpy.round(
        grid_metadata_dict[HEIGHTS_KEY]).astype(int)

    error_checking.assert_is_numpy_array(grid_point_heights_m_asl,
                                         num_dimensions=1)
    error_checking.assert_is_geq_numpy_array(grid_point_heights_m_asl, 0)
    error_checking.assert_is_greater_numpy_array(
        numpy.diff(grid_point_heights_m_asl), 0)

    # Compute grid-point coordinates.
    num_rows = reflectivity_matrix_dbz.shape[0]
    num_columns = reflectivity_matrix_dbz.shape[1]

    grid_point_latitudes_deg, grid_point_longitudes_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=grid_metadata_dict[MIN_LATITUDE_KEY],
            min_longitude_deg=grid_metadata_dict[MIN_LONGITUDE_KEY],
            lat_spacing_deg=grid_metadata_dict[LATITUDE_SPACING_KEY],
            lng_spacing_deg=grid_metadata_dict[LONGITUDE_SPACING_KEY],
            num_rows=num_rows,
            num_columns=num_columns))

    grid_metadata_dict[LATITUDES_KEY] = grid_point_latitudes_deg
    grid_metadata_dict[LONGITUDES_KEY] = grid_point_longitudes_deg
    reflectivity_matrix_dbz[numpy.isnan(reflectivity_matrix_dbz)] = 0.

    print('Applying criterion 1 for convective classification...')
    convective_flag_matrix = _apply_convective_criterion1(
        reflectivity_matrix_dbz=reflectivity_matrix_dbz,
        peakedness_neigh_metres=peakedness_neigh_metres,
        max_peakedness_height_m_asl=max_peakedness_height_m_asl,
        min_height_fraction=min_height_fraction_for_peakedness,
        halve_resolution_for_peakedness=halve_resolution_for_peakedness,
        min_composite_refl_dbz=min_composite_refl_criterion1_dbz,
        grid_metadata_dict=grid_metadata_dict)

    print('Number of convective pixels = {0:d}'.format(
        numpy.sum(convective_flag_matrix)))

    print('Applying criterion 2 for convective classification...')
    convective_flag_matrix = _apply_convective_criterion2(
        reflectivity_matrix_dbz=reflectivity_matrix_dbz,
        convective_flag_matrix=convective_flag_matrix,
        grid_metadata_dict=grid_metadata_dict,
        valid_time_unix_sec=valid_time_unix_sec,
        min_composite_refl_aml_dbz=min_composite_refl_aml_dbz)

    print('Number of convective pixels = {0:d}'.format(
        numpy.sum(convective_flag_matrix)))

    print('Applying criterion 3 for convective classification...')
    convective_flag_matrix = _apply_convective_criterion3(
        reflectivity_matrix_dbz=reflectivity_matrix_dbz,
        convective_flag_matrix=convective_flag_matrix,
        grid_metadata_dict=grid_metadata_dict,
        min_echo_top_m_asl=min_echo_top_m_asl,
        echo_top_level_dbz=echo_top_level_dbz)

    print('Number of convective pixels = {0:d}'.format(
        numpy.sum(convective_flag_matrix)))

    print('Applying criterion 4 for convective classification...')
    convective_flag_matrix = _apply_convective_criterion4(
        convective_flag_matrix=convective_flag_matrix,
        min_size_pixels=min_size_pixels)

    print('Number of convective pixels = {0:d}'.format(
        numpy.sum(convective_flag_matrix)))

    print('Applying criterion 5 for convective classification...')
    convective_flag_matrix = _apply_convective_criterion5(
        reflectivity_matrix_dbz=reflectivity_matrix_dbz,
        convective_flag_matrix=convective_flag_matrix,
        min_composite_refl_dbz=min_composite_refl_criterion5_dbz)

    return convective_flag_matrix, option_dict
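
The docstring above spells out the dictionary keys as literal strings, so a call might be assembled as in the sketch below. Every value is hypothetical, and it assumes the module-level KEY constants map to exactly those strings; it only shows how the inputs fit together.

import numpy

# Hypothetical 3-D reflectivity grid: 100 rows x 200 columns x 12 heights.
reflectivity_matrix_dbz = numpy.random.uniform(
    low=0., high=60., size=(100, 200, 12))

grid_metadata_dict = {
    'min_grid_point_latitude_deg': 35.,
    'latitude_spacing_deg': 0.01,
    'min_grid_point_longitude_deg': 262.,
    'longitude_spacing_deg': 0.01,
    'grid_point_heights_m_asl': numpy.linspace(500., 11500., num=12)
}

option_dict = {
    'peakedness_neigh_metres': 12000.,
    'max_peakedness_height_m_asl': 9000.,
    'min_height_fraction_for_peakedness': 0.5,
    'halve_resolution_for_peakedness': False,
    'min_echo_top_m_asl': 10000.,
    'echo_top_level_dbz': 25.,
    'min_size_pixels': 5,
    'min_composite_refl_criterion1_dbz': 25.,
    'min_composite_refl_criterion5_dbz': 25.,
    'min_composite_refl_aml_dbz': 25.
}

convective_flag_matrix, option_dict = find_convective_pixels(
    reflectivity_matrix_dbz=reflectivity_matrix_dbz,
    grid_metadata_dict=grid_metadata_dict,
    valid_time_unix_sec=1559750400,  # hypothetical valid time
    option_dict=option_dict)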
Example #8
def read_field_from_full_grid_file(netcdf_file_name,
                                   field_name=None,
                                   metadata_dict=None,
                                   raise_error_if_fails=True):
    """Reads one radar field from full-grid (not sparse-grid) file.

    This file should contain all radar variables for one time step.

    M = number of rows (unique grid-point latitudes)
    N = number of columns (unique grid-point longitudes)
    H = number of height levels (unique grid-point heights)

    :param netcdf_file_name: Path to input file.
    :param field_name: Name of radar field.
    :param metadata_dict: Dictionary created by
        read_metadata_from_full_grid_file.
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be
        opened, this method will raise an error.  If False and file cannot be
        opened, will return None for all output variables.
    :return: field_matrix: H-by-M-by-N numpy array with values of radar field.
    :return: unique_grid_point_heights_m_asl: length-H numpy array of grid-point
        heights (metres above sea level).  If array is increasing
        (decreasing), height increases (decreases) with the first index of
        field_matrix.
    :return: unique_grid_point_lat_deg: length-M numpy array of grid-point
        latitudes (deg N).  If array is increasing (decreasing), latitude
        increases (decreases) with the second index of field_matrix.
    :return: unique_grid_point_lng_deg: length-N numpy array of grid-point
        longitudes (deg E).  If array is increasing (decreasing), longitude
        increases (decreases) with the third index of field_matrix.
    """

    error_checking.assert_file_exists(netcdf_file_name)
    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)
    if netcdf_dataset is None:
        return None, None, None, None

    field_name_orig = _field_name_new_to_orig(field_name)
    field_matrix = numpy.array(
        netcdf_dataset.variables[field_name_orig][0, :, :, :])

    min_latitude_deg = metadata_dict[radar_io.NW_GRID_POINT_LAT_COLUMN] - (
        metadata_dict[radar_io.LAT_SPACING_COLUMN] *
        (metadata_dict[radar_io.NUM_LAT_COLUMN] - 1))
    unique_grid_point_lat_deg, unique_grid_point_lng_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=min_latitude_deg,
            min_longitude_deg=metadata_dict[radar_io.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_io.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_io.LNG_SPACING_COLUMN],
            num_rows=metadata_dict[radar_io.NUM_LAT_COLUMN],
            num_columns=metadata_dict[radar_io.NUM_LNG_COLUMN]))

    max_height_m_asl = metadata_dict[MIN_GRID_POINT_HEIGHT_COLUMN] + (
        metadata_dict[HEIGHT_SPACING_COLUMN] *
        (metadata_dict[NUM_HEIGHTS_COLUMN] - 1))
    unique_grid_point_heights_m_asl = numpy.linspace(
        metadata_dict[MIN_GRID_POINT_HEIGHT_COLUMN],
        max_height_m_asl,
        num=metadata_dict[NUM_HEIGHTS_COLUMN])

    netcdf_dataset.close()
    return (field_matrix, unique_grid_point_heights_m_asl,
            unique_grid_point_lat_deg, unique_grid_point_lng_deg)
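
Given the H-by-M-by-N ordering documented above, one way to pull out a single height level from the returned arrays is sketched below (a usage fragment that assumes the call above succeeded; the 3000-m target height is hypothetical).

# field_matrix and unique_grid_point_heights_m_asl come from the call above.
height_index = numpy.argmin(
    numpy.absolute(unique_grid_point_heights_m_asl - 3000))
one_level_matrix = field_matrix[height_index, ...]  # M-by-N slice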
Example #9
def write_field_to_myrorss_file(field_matrix,
                                netcdf_file_name,
                                field_name,
                                metadata_dict,
                                height_m_asl=None):
    """Writes field to MYRORSS-formatted file.

    M = number of rows (unique grid-point latitudes)
    N = number of columns (unique grid-point longitudes)

    :param field_matrix: M-by-N numpy array with one radar variable at one time.
        Latitude should increase down each column, and longitude should increase
        to the right along each row.
    :param netcdf_file_name: Path to output file.
    :param field_name: Name of radar field in GewitterGefahr format.
    :param metadata_dict: Dictionary created by either
        `gridrad_io.read_metadata_from_full_grid_file` or
        `read_metadata_from_raw_file`.
    :param height_m_asl: Height of radar field (metres above sea level).
    """

    if field_name == radar_utils.REFL_NAME:
        field_to_heights_dict_m_asl = (
            myrorss_and_mrms_utils.fields_and_refl_heights_to_dict(
                field_names=[field_name],
                data_source=radar_utils.MYRORSS_SOURCE_ID,
                refl_heights_m_asl=numpy.array([height_m_asl])))

    else:
        field_to_heights_dict_m_asl = (
            myrorss_and_mrms_utils.fields_and_refl_heights_to_dict(
                field_names=[field_name],
                data_source=radar_utils.MYRORSS_SOURCE_ID))

    field_name = list(field_to_heights_dict_m_asl.keys())[0]
    radar_height_m_asl = field_to_heights_dict_m_asl[field_name][0]

    if field_name in radar_utils.ECHO_TOP_NAMES:
        field_matrix = METRES_TO_KM * field_matrix
    field_name_myrorss = radar_utils.field_name_new_to_orig(
        field_name=field_name, data_source_name=radar_utils.MYRORSS_SOURCE_ID)

    file_system_utils.mkdir_recursive_if_necessary(file_name=netcdf_file_name)
    netcdf_dataset = Dataset(netcdf_file_name,
                             'w',
                             format='NETCDF3_64BIT_OFFSET')

    netcdf_dataset.setncattr(FIELD_NAME_COLUMN_ORIG, field_name_myrorss)
    netcdf_dataset.setncattr('DataType', 'SparseLatLonGrid')

    netcdf_dataset.setncattr(
        NW_GRID_POINT_LAT_COLUMN_ORIG,
        rounder.round_to_nearest(
            metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN],
            LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(
        NW_GRID_POINT_LNG_COLUMN_ORIG,
        rounder.round_to_nearest(
            metadata_dict[radar_utils.NW_GRID_POINT_LNG_COLUMN],
            LATLNG_MULTIPLE_DEG))
    # Built-in float: numpy.float was removed in NumPy 1.24.
    netcdf_dataset.setncattr(HEIGHT_COLUMN_ORIG,
                             METRES_TO_KM * float(radar_height_m_asl))
    netcdf_dataset.setncattr(
        UNIX_TIME_COLUMN_ORIG,
        numpy.int32(metadata_dict[radar_utils.UNIX_TIME_COLUMN]))
    netcdf_dataset.setncattr('FractionalTime', 0.)

    netcdf_dataset.setncattr('attributes', ' ColorMap SubType Unit')
    netcdf_dataset.setncattr('ColorMap-unit', 'dimensionless')
    netcdf_dataset.setncattr('ColorMap-value', '')
    netcdf_dataset.setncattr('SubType-unit', 'dimensionless')
    netcdf_dataset.setncattr('SubType-value', float(radar_height_m_asl))
    netcdf_dataset.setncattr('Unit-unit', 'dimensionless')
    netcdf_dataset.setncattr('Unit-value', 'dimensionless')

    netcdf_dataset.setncattr(
        LAT_SPACING_COLUMN_ORIG,
        rounder.round_to_nearest(metadata_dict[radar_utils.LAT_SPACING_COLUMN],
                                 LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(
        LNG_SPACING_COLUMN_ORIG,
        rounder.round_to_nearest(metadata_dict[radar_utils.LNG_SPACING_COLUMN],
                                 LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(SENTINEL_VALUE_COLUMNS_ORIG[0],
                             numpy.double(-99000.))
    netcdf_dataset.setncattr(SENTINEL_VALUE_COLUMNS_ORIG[1],
                             numpy.double(-99001.))

    min_latitude_deg = metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN] - (
        metadata_dict[radar_utils.LAT_SPACING_COLUMN] *
        (metadata_dict[radar_utils.NUM_LAT_COLUMN] - 1))
    unique_grid_point_lats_deg, unique_grid_point_lngs_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=min_latitude_deg,
            min_longitude_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN],
            num_rows=metadata_dict[radar_utils.NUM_LAT_COLUMN],
            num_columns=metadata_dict[radar_utils.NUM_LNG_COLUMN]))

    num_grid_rows = len(unique_grid_point_lats_deg)
    num_grid_columns = len(unique_grid_point_lngs_deg)
    field_vector = numpy.reshape(field_matrix,
                                 num_grid_rows * num_grid_columns)

    grid_point_lat_matrix, grid_point_lng_matrix = (
        grids.latlng_vectors_to_matrices(unique_grid_point_lats_deg,
                                         unique_grid_point_lngs_deg))
    grid_point_lat_vector = numpy.reshape(grid_point_lat_matrix,
                                          num_grid_rows * num_grid_columns)
    grid_point_lng_vector = numpy.reshape(grid_point_lng_matrix,
                                          num_grid_rows * num_grid_columns)

    real_value_indices = numpy.where(numpy.invert(
        numpy.isnan(field_vector)))[0]
    netcdf_dataset.createDimension(NUM_LAT_COLUMN_ORIG, num_grid_rows - 1)
    netcdf_dataset.createDimension(NUM_LNG_COLUMN_ORIG, num_grid_columns - 1)
    netcdf_dataset.createDimension(NUM_PIXELS_COLUMN_ORIG,
                                   len(real_value_indices))

    row_index_vector, column_index_vector = radar_utils.latlng_to_rowcol(
        grid_point_lat_vector,
        grid_point_lng_vector,
        nw_grid_point_lat_deg=metadata_dict[
            radar_utils.NW_GRID_POINT_LAT_COLUMN],
        nw_grid_point_lng_deg=metadata_dict[
            radar_utils.NW_GRID_POINT_LNG_COLUMN],
        lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
        lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN])

    netcdf_dataset.createVariable(field_name_myrorss, numpy.single,
                                  (NUM_PIXELS_COLUMN_ORIG, ))
    netcdf_dataset.createVariable(GRID_ROW_COLUMN_ORIG, numpy.int16,
                                  (NUM_PIXELS_COLUMN_ORIG, ))
    netcdf_dataset.createVariable(GRID_COLUMN_COLUMN_ORIG, numpy.int16,
                                  (NUM_PIXELS_COLUMN_ORIG, ))
    netcdf_dataset.createVariable(NUM_GRID_CELL_COLUMN_ORIG, numpy.int32,
                                  (NUM_PIXELS_COLUMN_ORIG, ))

    netcdf_dataset.variables[field_name_myrorss].setncattr(
        'BackgroundValue', numpy.int32(-99900))
    netcdf_dataset.variables[field_name_myrorss].setncattr(
        'units', 'dimensionless')
    netcdf_dataset.variables[field_name_myrorss].setncattr(
        'NumValidRuns', numpy.int32(len(real_value_indices)))

    netcdf_dataset.variables[field_name_myrorss][:] = field_vector[
        real_value_indices]
    netcdf_dataset.variables[GRID_ROW_COLUMN_ORIG][:] = (
        row_index_vector[real_value_indices])
    netcdf_dataset.variables[GRID_COLUMN_COLUMN_ORIG][:] = (
        column_index_vector[real_value_indices])
    netcdf_dataset.variables[NUM_GRID_CELL_COLUMN_ORIG][:] = (numpy.full(
        len(real_value_indices), 1, dtype=int))

    netcdf_dataset.close()
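
`grids.latlng_vectors_to_matrices`, used above, expands the unique coordinate vectors into full M-by-N coordinate matrices. A rough equivalent with `numpy.meshgrid` (an assumption about its behaviour, not the library code), using toy coordinates:

import numpy

unique_grid_point_lats_deg = numpy.array([55., 54.99, 54.98])
unique_grid_point_lngs_deg = numpy.array([262., 262.01])

# Every grid point gets its own latitude and longitude entry; both matrices
# are 3 x 2 (M-by-N) here.
grid_point_lng_matrix, grid_point_lat_matrix = numpy.meshgrid(
    unique_grid_point_lngs_deg, unique_grid_point_lats_deg)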
Example #10
    data_source=radar_utils.MYRORSS_SOURCE_ID,
    field_name=radar_utils.REFL_NAME)

LARGE_GRID_METADATA_DICT = {
    echo_classifn.MIN_LATITUDE_KEY: 20.,
    echo_classifn.LATITUDE_SPACING_KEY: 0.01,
    echo_classifn.MIN_LONGITUDE_KEY: 230.,
    echo_classifn.LONGITUDE_SPACING_KEY: 0.01,
    echo_classifn.HEIGHTS_KEY: LARGE_GRID_HEIGHTS_M_ASL
}

THESE_LATITUDES_DEG, THESE_LONGITUDES_DEG = grids.get_latlng_grid_points(
    min_latitude_deg=LARGE_GRID_METADATA_DICT[echo_classifn.MIN_LATITUDE_KEY],
    min_longitude_deg=LARGE_GRID_METADATA_DICT[
        echo_classifn.MIN_LONGITUDE_KEY],
    lat_spacing_deg=LARGE_GRID_METADATA_DICT[
        echo_classifn.LATITUDE_SPACING_KEY],
    lng_spacing_deg=LARGE_GRID_METADATA_DICT[
        echo_classifn.LONGITUDE_SPACING_KEY],
    num_rows=7001,
    num_columns=3501)

LARGE_GRID_METADATA_DICT[echo_classifn.LATITUDES_KEY] = THESE_LATITUDES_DEG
LARGE_GRID_METADATA_DICT[echo_classifn.LONGITUDES_KEY] = THESE_LONGITUDES_DEG

LARGE_RADIUS_METRES = 12000.
NUM_ROWS_IN_LARGE_NEIGH = 23
NUM_COLUMNS_IN_LARGE_NEIGH = 29

GRID_METADATA_DICT = {
    echo_classifn.MIN_LATITUDE_KEY: 35.1,
    echo_classifn.LATITUDE_SPACING_KEY: 0.2,
Example #11
def _check_grid_points(grid_point_latitudes_deg, grid_point_longitudes_deg,
                       metadata_dict):
    """Ensures that grid is regular in lat-long coordinates.

    M = number of rows (unique grid-point latitudes)
    N = number of columns (unique grid-point longitudes)

    :param grid_point_latitudes_deg: length-M numpy array of grid-point
        latitudes (deg N).
    :param grid_point_longitudes_deg: length-N numpy array of grid-point
        longitudes (deg E).
    :param metadata_dict: Dictionary created by
        `read_metadata_from_full_grid_file`.
    :raises: ValueError: if the grid is not regular in lat-long coordinates.
    """

    min_latitude_deg = metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN] - (
        metadata_dict[radar_utils.LAT_SPACING_COLUMN] *
        (metadata_dict[radar_utils.NUM_LAT_COLUMN] - 1))

    expected_latitudes_deg, expected_longitudes_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=min_latitude_deg,
            min_longitude_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN],
            num_rows=metadata_dict[radar_utils.NUM_LAT_COLUMN],
            num_columns=metadata_dict[radar_utils.NUM_LNG_COLUMN]))

    if not numpy.allclose(grid_point_latitudes_deg,
                          expected_latitudes_deg,
                          atol=LATITUDE_TOLERANCE_DEG):

        for i in range(len(grid_point_latitudes_deg)):
            print(
                ('Expected latitude = {0:.4f} deg N ... actual = {1:.4f} deg N'
                 ).format(expected_latitudes_deg[i],
                          grid_point_latitudes_deg[i]))

        max_latitude_diff_deg = numpy.max(
            numpy.absolute(expected_latitudes_deg - grid_point_latitudes_deg))

        error_string = (
            '\n\nAs shown above, lat-long grid is irregular.  There is a max '
            'difference of {0:f} deg N between expected and actual latitudes.'
        ).format(max_latitude_diff_deg)

        raise ValueError(error_string)

    if not numpy.allclose(grid_point_longitudes_deg,
                          expected_longitudes_deg,
                          atol=LONGITUDE_TOLERANCE_DEG):

        for i in range(len(grid_point_longitudes_deg)):
            print((
                'Expected longitude = {0:.4f} deg E ... actual = {1:.4f} deg E'
            ).format(expected_longitudes_deg[i], grid_point_longitudes_deg[i]))

        max_longitude_diff_deg = numpy.max(
            numpy.absolute(expected_longitudes_deg -
                           grid_point_longitudes_deg))

        error_string = (
            '\n\nAs shown above, lat-long grid is irregular.  There is a max '
            'difference of {0:f} deg E between expected and actual longitudes.'
        ).format(max_longitude_diff_deg)

        raise ValueError(error_string)