def test_latlng_to_rowcol(self):
    """Verifies that latlng_to_rowcol maps lat-long points to the expected
    grid rows and columns."""

    actual_rows, actual_columns = radar_utils.latlng_to_rowcol(
        GRID_POINT_LATITUDES_DEG, GRID_POINT_LONGITUDES_DEG,
        nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
        nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
        lat_spacing_deg=LAT_SPACING_DEG, lng_spacing_deg=LNG_SPACING_DEG)

    self.assertTrue(
        numpy.allclose(actual_rows, GRID_ROW_INDICES, atol=TOLERANCE)
    )
    self.assertTrue(
        numpy.allclose(actual_columns, GRID_COLUMN_INDICES, atol=TOLERANCE)
    )
def read_raw_file(raw_file_name):
    """Reads tracking data from raw (either JSON or ASCII) file.

    This file should contain all storm objects at one time step.

    :param raw_file_name: Path to input file.
    :return: storm_object_table: See documentation for
        `storm_tracking_io.write_processed_file`.
    """

    error_checking.assert_file_exists(raw_file_name)
    _, pathless_file_name = os.path.split(raw_file_name)
    _, file_extension = os.path.splitext(pathless_file_name)
    _check_raw_file_extension(file_extension)

    # Valid time is encoded in the file name.
    unix_time_sec = raw_file_name_to_time(raw_file_name)

    if file_extension == ASCII_FILE_EXTENSION:
        storm_ids = []
        east_velocities_m_s01 = []
        north_velocities_m_s01 = []
        list_of_latitude_vertex_arrays_deg = []
        list_of_longitude_vertex_arrays_deg = []

        # BUG FIX: the original iterated over
        # `open(raw_file_name, 'r').readlines()`, which never closed the file
        # handle.  A context manager guarantees closure even if a line fails
        # to parse.
        with open(raw_file_name, 'r') as ascii_file_handle:
            for this_line in ascii_file_handle:
                these_words = this_line.split(':')

                # Skip lines too short to describe a storm object.
                if len(these_words) < MIN_WORDS_PER_ASCII_LINE:
                    continue

                storm_ids.append(these_words[STORM_ID_INDEX_IN_ASCII_FILES])
                east_velocities_m_s01.append(
                    float(these_words[U_MOTION_INDEX_IN_ASCII_FILES]))

                # Sign is flipped here -- presumably the raw file stores
                # southward-positive motion; TODO confirm against the raw
                # ASCII format spec.
                north_velocities_m_s01.append(
                    -1 * float(these_words[V_MOTION_INDEX_IN_ASCII_FILES]))

                # Polygon vertices are comma-separated, with latitude and
                # longitude words interleaved (hence the stride of 2).
                these_polygon_words = numpy.array(
                    these_words[POLYGON_INDEX_IN_ASCII_FILES].split(','))
                these_latitude_words = these_polygon_words[
                    LATITUDE_INDEX_IN_ASCII_FILES::2].tolist()
                these_longitude_words = these_polygon_words[
                    LONGITUDE_INDEX_IN_ASCII_FILES::2].tolist()

                these_latitudes_deg = numpy.array(
                    [float(w) for w in these_latitude_words])
                these_longitudes_deg = numpy.array(
                    [float(w) for w in these_longitude_words])
                list_of_latitude_vertex_arrays_deg.append(these_latitudes_deg)
                list_of_longitude_vertex_arrays_deg.append(
                    these_longitudes_deg)

        east_velocities_m_s01 = numpy.array(east_velocities_m_s01)
        north_velocities_m_s01 = numpy.array(north_velocities_m_s01)
        num_storms = len(storm_ids)

    else:
        with open(raw_file_name) as json_file_handle:
            probsevere_dict = json.load(json_file_handle)

        num_storms = len(probsevere_dict[FEATURES_KEY_IN_JSON_FILES])
        storm_ids = [None] * num_storms
        east_velocities_m_s01 = numpy.full(num_storms, numpy.nan)
        north_velocities_m_s01 = numpy.full(num_storms, numpy.nan)
        list_of_latitude_vertex_arrays_deg = [None] * num_storms
        list_of_longitude_vertex_arrays_deg = [None] * num_storms

        for i in range(num_storms):
            this_feature_dict = probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
            this_property_dict = this_feature_dict[
                PROPERTIES_KEY_IN_JSON_FILES]

            storm_ids[i] = str(this_property_dict[STORM_ID_KEY_IN_JSON_FILES])
            east_velocities_m_s01[i] = float(
                this_property_dict[U_MOTION_KEY_IN_JSON_FILES])

            # Same sign convention as in the ASCII branch above.
            north_velocities_m_s01[i] = -1 * float(
                this_property_dict[V_MOTION_KEY_IN_JSON_FILES])

            # First (outer) ring of the GeoJSON polygon; each row of the
            # matrix is one vertex.
            this_vertex_matrix_deg = numpy.array(
                this_feature_dict[GEOMETRY_KEY_IN_JSON_FILES]
                [COORDINATES_KEY_IN_JSON_FILES][0])
            list_of_latitude_vertex_arrays_deg[i] = numpy.array(
                this_vertex_matrix_deg[:, LATITUDE_INDEX_IN_JSON_FILES])
            list_of_longitude_vertex_arrays_deg[i] = numpy.array(
                this_vertex_matrix_deg[:, LONGITUDE_INDEX_IN_JSON_FILES])

    spc_date_unix_sec = time_conversion.time_to_spc_date_unix_sec(
        unix_time_sec)

    # All storm objects in one file share the same valid time, SPC date, and
    # dummy tracking period.
    unix_times_sec = numpy.full(num_storms, unix_time_sec, dtype=int)
    spc_dates_unix_sec = numpy.full(num_storms, spc_date_unix_sec, dtype=int)
    tracking_start_times_unix_sec = numpy.full(
        num_storms, DUMMY_TRACKING_START_TIME_UNIX_SEC, dtype=int)
    tracking_end_times_unix_sec = numpy.full(
        num_storms, DUMMY_TRACKING_END_TIME_UNIX_SEC, dtype=int)

    storm_object_dict = {
        tracking_utils.STORM_ID_COLUMN: storm_ids,
        tracking_utils.EAST_VELOCITY_COLUMN: east_velocities_m_s01,
        tracking_utils.NORTH_VELOCITY_COLUMN: north_velocities_m_s01,
        tracking_utils.TIME_COLUMN: unix_times_sec,
        tracking_utils.SPC_DATE_COLUMN: spc_dates_unix_sec,
        tracking_utils.TRACKING_START_TIME_COLUMN:
        tracking_start_times_unix_sec,
        tracking_utils.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec
    }
    storm_object_table = pandas.DataFrame.from_dict(storm_object_dict)

    # Placeholder columns, filled per storm object in the loop below.
    # `nested_array` is only used to give pandas a column of list cells.
    storm_ages_sec = numpy.full(num_storms, numpy.nan)
    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
    nested_array = storm_object_table[[
        tracking_utils.STORM_ID_COLUMN, tracking_utils.STORM_ID_COLUMN
    ]].values.tolist()

    argument_dict = {
        tracking_utils.AGE_COLUMN: storm_ages_sec,
        tracking_utils.CENTROID_LAT_COLUMN: simple_array,
        tracking_utils.CENTROID_LNG_COLUMN: simple_array,
        tracking_utils.GRID_POINT_LAT_COLUMN: nested_array,
        tracking_utils.GRID_POINT_LNG_COLUMN: nested_array,
        tracking_utils.GRID_POINT_ROW_COLUMN: nested_array,
        tracking_utils.GRID_POINT_COLUMN_COLUMN: nested_array,
        tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN: object_array,
        tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN: object_array
    }
    storm_object_table = storm_object_table.assign(**argument_dict)

    for i in range(num_storms):
        # Convert polygon vertices from lat-long to grid coordinates.
        these_vertex_rows, these_vertex_columns = (
            radar_utils.latlng_to_rowcol(
                latitudes_deg=list_of_latitude_vertex_arrays_deg[i],
                longitudes_deg=list_of_longitude_vertex_arrays_deg[i],
                nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
                nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
                lat_spacing_deg=GRID_LAT_SPACING_DEG,
                lng_spacing_deg=GRID_LNG_SPACING_DEG))

        # Clean up probSevere vertices (exact semantics live in `polygons`).
        these_vertex_rows, these_vertex_columns = (
            polygons.fix_probsevere_vertices(
                row_indices_orig=these_vertex_rows,
                column_indices_orig=these_vertex_columns))

        # Convert the fixed vertices back to lat-long for the centroid and
        # lat-long polygon object below.
        these_vertex_latitudes_deg, these_vertex_longitudes_deg = (
            radar_utils.rowcol_to_latlng(
                grid_rows=these_vertex_rows,
                grid_columns=these_vertex_columns,
                nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
                nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
                lat_spacing_deg=GRID_LAT_SPACING_DEG,
                lng_spacing_deg=GRID_LNG_SPACING_DEG))

        # Grid points *inside* the polygon (not just the vertices).
        (storm_object_table[tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
         storm_object_table[tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i]
         ) = polygons.simple_polygon_to_grid_points(
             vertex_row_indices=these_vertex_rows,
             vertex_column_indices=these_vertex_columns)

        (storm_object_table[tracking_utils.GRID_POINT_LAT_COLUMN].values[i],
         storm_object_table[tracking_utils.GRID_POINT_LNG_COLUMN].values[i]
         ) = radar_utils.rowcol_to_latlng(
             grid_rows=storm_object_table[
                 tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
             grid_columns=storm_object_table[
                 tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i],
             nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
             nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
             lat_spacing_deg=GRID_LAT_SPACING_DEG,
             lng_spacing_deg=GRID_LNG_SPACING_DEG)

        (storm_object_table[tracking_utils.CENTROID_LAT_COLUMN].values[i],
         storm_object_table[tracking_utils.CENTROID_LNG_COLUMN].values[i]
         ) = geodetic_utils.get_latlng_centroid(
             latitudes_deg=these_vertex_latitudes_deg,
             longitudes_deg=these_vertex_longitudes_deg)

        # Polygon objects use x = column/longitude, y = row/latitude.
        storm_object_table[tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN].values[
            i] = polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_columns,
                exterior_y_coords=these_vertex_rows)

        storm_object_table[tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN].values[
            i] = polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_longitudes_deg,
                exterior_y_coords=these_vertex_latitudes_deg)

    return storm_object_table
Example #3
0
def get_grid_points_in_storm_objects(storm_object_table,
                                     orig_grid_metadata_dict,
                                     new_grid_metadata_dict):
    """Finds grid points inside each storm object.

    :param storm_object_table: pandas DataFrame with columns specified by
        `storm_tracking_io.write_file`.
    :param orig_grid_metadata_dict: Dictionary with the following keys,
        describing radar grid used to create storm objects.
    orig_grid_metadata_dict['nw_grid_point_lat_deg']: Latitude (deg N) of
        northwesternmost grid point.
    orig_grid_metadata_dict['nw_grid_point_lng_deg']: Longitude (deg E) of
        northwesternmost grid point.
    orig_grid_metadata_dict['lat_spacing_deg']: Spacing (deg N) between adjacent
        rows.
    orig_grid_metadata_dict['lng_spacing_deg']: Spacing (deg E) between adjacent
        columns.
    orig_grid_metadata_dict['num_lat_in_grid']: Number of rows (unique grid-
        point latitudes).
    orig_grid_metadata_dict['num_lng_in_grid']: Number of columns (unique grid-
        point longitudes).

    :param new_grid_metadata_dict: Same as `orig_grid_metadata_dict`, except for
        new radar grid.  We want to know grid points inside each storm object
        for the new grid.
    :return: storm_object_to_grid_points_table: pandas DataFrame with the
        following columns.  Each row is one storm object.
    storm_object_to_grid_points_table.full_id_string: String ID for storm cell.
    storm_object_to_grid_points_table.grid_point_rows: 1-D numpy array with row
        indices (integers) of grid points in storm object.
    storm_object_to_grid_points_table.grid_point_columns: 1-D numpy array with
        column indices (integers) of grid points in storm object.
    """

    # If the grids are identical, the stored row/column indices are already
    # valid for the new grid.
    if are_grids_equal(orig_grid_metadata_dict, new_grid_metadata_dict):
        return storm_object_table[STORM_OBJECT_TO_GRID_PTS_COLUMNS]

    # BUG FIX: copy the slice before mutating it in place.  The original code
    # wrote through `.values[i]` on a plain slice of `storm_object_table`,
    # which risks pandas SettingWithCopy behaviour (writes silently landing in
    # a temporary, or silently mutating the caller's DataFrame).
    storm_object_to_grid_points_table = storm_object_table[
        STORM_OBJECT_TO_GRID_PTS_COLUMNS + GRID_POINT_LATLNG_COLUMNS].copy()

    num_storm_objects = len(storm_object_to_grid_points_table.index)

    for i in range(num_storm_objects):
        # Re-project this storm object's lat-long grid points onto the new
        # grid.
        these_grid_rows, these_grid_columns = radar_utils.latlng_to_rowcol(
            latitudes_deg=storm_object_to_grid_points_table[
                tracking_utils.LATITUDES_IN_STORM_COLUMN].values[i],
            longitudes_deg=storm_object_to_grid_points_table[
                tracking_utils.LONGITUDES_IN_STORM_COLUMN].values[i],
            nw_grid_point_lat_deg=new_grid_metadata_dict[
                radar_utils.NW_GRID_POINT_LAT_COLUMN],
            nw_grid_point_lng_deg=new_grid_metadata_dict[
                radar_utils.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=new_grid_metadata_dict[
                radar_utils.LAT_SPACING_COLUMN],
            lng_spacing_deg=new_grid_metadata_dict[
                radar_utils.LNG_SPACING_COLUMN])

        storm_object_to_grid_points_table[
            tracking_utils.ROWS_IN_STORM_COLUMN].values[i] = these_grid_rows

        storm_object_to_grid_points_table[
            tracking_utils.COLUMNS_IN_STORM_COLUMN
        ].values[i] = these_grid_columns

    # Drop the lat-long helper columns; return only row/column indices.
    return storm_object_to_grid_points_table[STORM_OBJECT_TO_GRID_PTS_COLUMNS]
Example #4
0
def write_field_to_myrorss_file(field_matrix,
                                netcdf_file_name,
                                field_name,
                                metadata_dict,
                                height_m_asl=None):
    """Writes field to MYRORSS-formatted file.

    M = number of rows (unique grid-point latitudes)
    N = number of columns (unique grid-point longitudes)

    :param field_matrix: M-by-N numpy array with one radar variable at one time.
        Latitude should increase down each column, and longitude should increase
        to the right along each row.
    :param netcdf_file_name: Path to output file.
    :param field_name: Name of radar field in GewitterGefahr format.
    :param metadata_dict: Dictionary created by either
        `gridrad_io.read_metadata_from_full_grid_file` or
        `read_metadata_from_raw_file`.
    :param height_m_asl: Height of radar field (metres above sea level).
    """

    # Only reflectivity takes a caller-specified height; all other fields get
    # their (single) standard height from the lookup.
    if field_name == radar_utils.REFL_NAME:
        field_to_heights_dict_m_asl = (
            myrorss_and_mrms_utils.fields_and_refl_heights_to_dict(
                field_names=[field_name],
                data_source=radar_utils.MYRORSS_SOURCE_ID,
                refl_heights_m_asl=numpy.array([height_m_asl])))

    else:
        field_to_heights_dict_m_asl = (
            myrorss_and_mrms_utils.fields_and_refl_heights_to_dict(
                field_names=[field_name],
                data_source=radar_utils.MYRORSS_SOURCE_ID))

    field_name = list(field_to_heights_dict_m_asl.keys())[0]
    radar_height_m_asl = field_to_heights_dict_m_asl[field_name][0]

    # Echo tops are stored in km in MYRORSS files.
    if field_name in radar_utils.ECHO_TOP_NAMES:
        field_matrix = METRES_TO_KM * field_matrix

    field_name_myrorss = radar_utils.field_name_new_to_orig(
        field_name=field_name, data_source_name=radar_utils.MYRORSS_SOURCE_ID)

    file_system_utils.mkdir_recursive_if_necessary(file_name=netcdf_file_name)
    netcdf_dataset = Dataset(netcdf_file_name,
                             'w',
                             format='NETCDF3_64BIT_OFFSET')

    netcdf_dataset.setncattr(FIELD_NAME_COLUMN_ORIG, field_name_myrorss)
    netcdf_dataset.setncattr('DataType', 'SparseLatLonGrid')

    netcdf_dataset.setncattr(
        NW_GRID_POINT_LAT_COLUMN_ORIG,
        rounder.round_to_nearest(
            metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN],
            LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(
        NW_GRID_POINT_LNG_COLUMN_ORIG,
        rounder.round_to_nearest(
            metadata_dict[radar_utils.NW_GRID_POINT_LNG_COLUMN],
            LATLNG_MULTIPLE_DEG))

    # BUG FIX: `numpy.float` was deprecated in NumPy 1.20 and removed in 1.24;
    # it raises AttributeError on modern NumPy.  The builtin `float` is the
    # documented replacement.
    netcdf_dataset.setncattr(HEIGHT_COLUMN_ORIG,
                             METRES_TO_KM * float(radar_height_m_asl))
    netcdf_dataset.setncattr(
        UNIX_TIME_COLUMN_ORIG,
        numpy.int32(metadata_dict[radar_utils.UNIX_TIME_COLUMN]))
    netcdf_dataset.setncattr('FractionalTime', 0.)

    netcdf_dataset.setncattr('attributes', ' ColorMap SubType Unit')
    netcdf_dataset.setncattr('ColorMap-unit', 'dimensionless')
    netcdf_dataset.setncattr('ColorMap-value', '')
    netcdf_dataset.setncattr('SubType-unit', 'dimensionless')
    # BUG FIX: numpy.float -> float (same removal as above).
    netcdf_dataset.setncattr('SubType-value', float(radar_height_m_asl))
    netcdf_dataset.setncattr('Unit-unit', 'dimensionless')
    netcdf_dataset.setncattr('Unit-value', 'dimensionless')

    netcdf_dataset.setncattr(
        LAT_SPACING_COLUMN_ORIG,
        rounder.round_to_nearest(metadata_dict[radar_utils.LAT_SPACING_COLUMN],
                                 LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(
        LNG_SPACING_COLUMN_ORIG,
        rounder.round_to_nearest(metadata_dict[radar_utils.LNG_SPACING_COLUMN],
                                 LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(SENTINEL_VALUE_COLUMNS_ORIG[0],
                             numpy.double(-99000.))
    netcdf_dataset.setncattr(SENTINEL_VALUE_COLUMNS_ORIG[1],
                             numpy.double(-99001.))

    # Southernmost latitude, derived from the NW corner and grid spacing.
    min_latitude_deg = metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN] - (
        metadata_dict[radar_utils.LAT_SPACING_COLUMN] *
        (metadata_dict[radar_utils.NUM_LAT_COLUMN] - 1))
    unique_grid_point_lats_deg, unique_grid_point_lngs_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=min_latitude_deg,
            min_longitude_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN],
            num_rows=metadata_dict[radar_utils.NUM_LAT_COLUMN],
            num_columns=metadata_dict[radar_utils.NUM_LNG_COLUMN]))

    num_grid_rows = len(unique_grid_point_lats_deg)
    num_grid_columns = len(unique_grid_point_lngs_deg)
    field_vector = numpy.reshape(field_matrix,
                                 num_grid_rows * num_grid_columns)

    grid_point_lat_matrix, grid_point_lng_matrix = (
        grids.latlng_vectors_to_matrices(unique_grid_point_lats_deg,
                                         unique_grid_point_lngs_deg))
    grid_point_lat_vector = numpy.reshape(grid_point_lat_matrix,
                                          num_grid_rows * num_grid_columns)
    grid_point_lng_vector = numpy.reshape(grid_point_lng_matrix,
                                          num_grid_rows * num_grid_columns)

    # Sparse format: only non-NaN pixels are written.
    real_value_indices = numpy.where(numpy.invert(
        numpy.isnan(field_vector)))[0]

    # NOTE(review): dimensions are recorded as size minus one -- presumably
    # the MYRORSS sparse-grid convention; confirm before changing.
    netcdf_dataset.createDimension(NUM_LAT_COLUMN_ORIG, num_grid_rows - 1)
    netcdf_dataset.createDimension(NUM_LNG_COLUMN_ORIG, num_grid_columns - 1)
    netcdf_dataset.createDimension(NUM_PIXELS_COLUMN_ORIG,
                                   len(real_value_indices))

    row_index_vector, column_index_vector = radar_utils.latlng_to_rowcol(
        grid_point_lat_vector,
        grid_point_lng_vector,
        nw_grid_point_lat_deg=metadata_dict[
            radar_utils.NW_GRID_POINT_LAT_COLUMN],
        nw_grid_point_lng_deg=metadata_dict[
            radar_utils.NW_GRID_POINT_LNG_COLUMN],
        lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
        lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN])

    netcdf_dataset.createVariable(field_name_myrorss, numpy.single,
                                  (NUM_PIXELS_COLUMN_ORIG, ))
    netcdf_dataset.createVariable(GRID_ROW_COLUMN_ORIG, numpy.int16,
                                  (NUM_PIXELS_COLUMN_ORIG, ))
    netcdf_dataset.createVariable(GRID_COLUMN_COLUMN_ORIG, numpy.int16,
                                  (NUM_PIXELS_COLUMN_ORIG, ))
    netcdf_dataset.createVariable(NUM_GRID_CELL_COLUMN_ORIG, numpy.int32,
                                  (NUM_PIXELS_COLUMN_ORIG, ))

    netcdf_dataset.variables[field_name_myrorss].setncattr(
        'BackgroundValue', numpy.int32(-99900))
    netcdf_dataset.variables[field_name_myrorss].setncattr(
        'units', 'dimensionless')
    netcdf_dataset.variables[field_name_myrorss].setncattr(
        'NumValidRuns', numpy.int32(len(real_value_indices)))

    netcdf_dataset.variables[field_name_myrorss][:] = field_vector[
        real_value_indices]
    netcdf_dataset.variables[GRID_ROW_COLUMN_ORIG][:] = (
        row_index_vector[real_value_indices])
    netcdf_dataset.variables[GRID_COLUMN_COLUMN_ORIG][:] = (
        column_index_vector[real_value_indices])
    # Each sparse entry covers exactly one grid cell (no run-length encoding).
    netcdf_dataset.variables[NUM_GRID_CELL_COLUMN_ORIG][:] = (numpy.full(
        len(real_value_indices), 1, dtype=int))

    netcdf_dataset.close()