Example #1
    def test_simple_polygon_to_grid_points(self):
        """Ensures correct output from simple_polygon_to_grid_points."""

        these_grid_point_rows, these_grid_point_columns = (
            polygons.simple_polygon_to_grid_points(VERTEX_ROWS_SIMPLE,
                                                   VERTEX_COLUMNS_SIMPLE))

        self.assertTrue(
            numpy.array_equal(these_grid_point_rows,
                              GRID_POINT_ROWS_IN_SIMPLE_POLY))
        self.assertTrue(
            numpy.array_equal(these_grid_point_columns,
                              GRID_POINT_COLUMNS_IN_SIMPLE_POLY))
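
The test above exercises polygons.simple_polygon_to_grid_points, which turns the vertex rows/columns of a simple polygon into the rows/columns of the grid points it encloses. Below is a minimal, self-contained sketch of the same idea using matplotlib's Path; the helper name and the square example are invented for illustration, it is not the library implementation, and its boundary handling may differ from the real routine.

import numpy
from matplotlib.path import Path


def _polygon_to_grid_points(vertex_rows, vertex_columns):
    """Finds grid points strictly inside a simple polygon (illustrative only)."""
    row_range = numpy.arange(
        int(numpy.floor(vertex_rows.min())),
        int(numpy.ceil(vertex_rows.max())) + 1)
    column_range = numpy.arange(
        int(numpy.floor(vertex_columns.min())),
        int(numpy.ceil(vertex_columns.max())) + 1)

    candidate_rows, candidate_columns = numpy.meshgrid(
        row_range, column_range, indexing='ij')
    candidate_points = numpy.column_stack(
        (candidate_rows.ravel(), candidate_columns.ravel()))

    polygon_path = Path(numpy.column_stack((vertex_rows, vertex_columns)))
    inside_flags = polygon_path.contains_points(candidate_points)
    return candidate_points[inside_flags, 0], candidate_points[inside_flags, 1]


# Hypothetical square with corners at (row, column) = (0, 0) and (2, 2);
# the only strictly interior grid point is (1, 1).
grid_rows, grid_columns = _polygon_to_grid_points(
    numpy.array([0., 0., 2., 2.]), numpy.array([0., 2., 2., 0.]))
print(grid_rows, grid_columns)
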
Example #2
def read_raw_file(raw_file_name):
    """Reads tracking data from a raw (JSON or ASCII) file.

    This file should contain all storm objects at one time step.

    :param raw_file_name: Path to input file.
    :return: storm_object_table: See documentation for
        `storm_tracking_io.write_processed_file`.
    """

    error_checking.assert_file_exists(raw_file_name)
    _, pathless_file_name = os.path.split(raw_file_name)
    _, file_extension = os.path.splitext(pathless_file_name)
    _check_raw_file_extension(file_extension)

    unix_time_sec = raw_file_name_to_time(raw_file_name)

    if file_extension == ASCII_FILE_EXTENSION:
        storm_ids = []
        east_velocities_m_s01 = []
        north_velocities_m_s01 = []
        list_of_latitude_vertex_arrays_deg = []
        list_of_longitude_vertex_arrays_deg = []

        with open(raw_file_name, 'r') as ascii_file_handle:
            ascii_lines = ascii_file_handle.readlines()

        for this_line in ascii_lines:
            these_words = this_line.split(':')
            if len(these_words) < MIN_WORDS_PER_ASCII_LINE:
                continue

            storm_ids.append(these_words[STORM_ID_INDEX_IN_ASCII_FILES])
            east_velocities_m_s01.append(
                float(these_words[U_MOTION_INDEX_IN_ASCII_FILES]))
            north_velocities_m_s01.append(
                -1 * float(these_words[V_MOTION_INDEX_IN_ASCII_FILES]))

            these_polygon_words = numpy.array(
                these_words[POLYGON_INDEX_IN_ASCII_FILES].split(','))
            these_latitude_words = these_polygon_words[
                LATITUDE_INDEX_IN_ASCII_FILES::2].tolist()
            these_longitude_words = these_polygon_words[
                LONGITUDE_INDEX_IN_ASCII_FILES::2].tolist()

            these_latitudes_deg = numpy.array(
                [float(w) for w in these_latitude_words])
            these_longitudes_deg = numpy.array(
                [float(w) for w in these_longitude_words])
            list_of_latitude_vertex_arrays_deg.append(these_latitudes_deg)
            list_of_longitude_vertex_arrays_deg.append(these_longitudes_deg)

        east_velocities_m_s01 = numpy.array(east_velocities_m_s01)
        north_velocities_m_s01 = numpy.array(north_velocities_m_s01)
        num_storms = len(storm_ids)

    else:
        with open(raw_file_name) as json_file_handle:
            probsevere_dict = json.load(json_file_handle)

        num_storms = len(probsevere_dict[FEATURES_KEY_IN_JSON_FILES])
        storm_ids = [None] * num_storms
        east_velocities_m_s01 = numpy.full(num_storms, numpy.nan)
        north_velocities_m_s01 = numpy.full(num_storms, numpy.nan)
        list_of_latitude_vertex_arrays_deg = [None] * num_storms
        list_of_longitude_vertex_arrays_deg = [None] * num_storms

        for i in range(num_storms):
            storm_ids[i] = str(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [PROPERTIES_KEY_IN_JSON_FILES][STORM_ID_KEY_IN_JSON_FILES])
            east_velocities_m_s01[i] = float(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [PROPERTIES_KEY_IN_JSON_FILES][U_MOTION_KEY_IN_JSON_FILES])
            north_velocities_m_s01[i] = -1 * float(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [PROPERTIES_KEY_IN_JSON_FILES][V_MOTION_KEY_IN_JSON_FILES])

            this_vertex_matrix_deg = numpy.array(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [GEOMETRY_KEY_IN_JSON_FILES][COORDINATES_KEY_IN_JSON_FILES][0])
            list_of_latitude_vertex_arrays_deg[i] = numpy.array(
                this_vertex_matrix_deg[:, LATITUDE_INDEX_IN_JSON_FILES])
            list_of_longitude_vertex_arrays_deg[i] = numpy.array(
                this_vertex_matrix_deg[:, LONGITUDE_INDEX_IN_JSON_FILES])

    spc_date_unix_sec = time_conversion.time_to_spc_date_unix_sec(
        unix_time_sec)
    unix_times_sec = numpy.full(num_storms, unix_time_sec, dtype=int)
    spc_dates_unix_sec = numpy.full(num_storms, spc_date_unix_sec, dtype=int)
    tracking_start_times_unix_sec = numpy.full(
        num_storms, DUMMY_TRACKING_START_TIME_UNIX_SEC, dtype=int)
    tracking_end_times_unix_sec = numpy.full(num_storms,
                                             DUMMY_TRACKING_END_TIME_UNIX_SEC,
                                             dtype=int)

    storm_object_dict = {
        tracking_utils.STORM_ID_COLUMN: storm_ids,
        tracking_utils.EAST_VELOCITY_COLUMN: east_velocities_m_s01,
        tracking_utils.NORTH_VELOCITY_COLUMN: north_velocities_m_s01,
        tracking_utils.TIME_COLUMN: unix_times_sec,
        tracking_utils.SPC_DATE_COLUMN: spc_dates_unix_sec,
        tracking_utils.TRACKING_START_TIME_COLUMN:
        tracking_start_times_unix_sec,
        tracking_utils.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec
    }
    storm_object_table = pandas.DataFrame.from_dict(storm_object_dict)

    storm_ages_sec = numpy.full(num_storms, numpy.nan)
    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
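    # "Nested array" trick: listing the storm-ID column twice and calling
    # .values.tolist() yields one small Python list per row, producing an
    # object column whose entries can later be replaced with full numpy arrays.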
    nested_array = storm_object_table[[
        tracking_utils.STORM_ID_COLUMN, tracking_utils.STORM_ID_COLUMN
    ]].values.tolist()

    argument_dict = {
        tracking_utils.AGE_COLUMN: storm_ages_sec,
        tracking_utils.CENTROID_LAT_COLUMN: simple_array,
        tracking_utils.CENTROID_LNG_COLUMN: simple_array,
        tracking_utils.GRID_POINT_LAT_COLUMN: nested_array,
        tracking_utils.GRID_POINT_LNG_COLUMN: nested_array,
        tracking_utils.GRID_POINT_ROW_COLUMN: nested_array,
        tracking_utils.GRID_POINT_COLUMN_COLUMN: nested_array,
        tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN: object_array,
        tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN: object_array
    }
    storm_object_table = storm_object_table.assign(**argument_dict)

    for i in range(num_storms):
        these_vertex_rows, these_vertex_columns = (
            radar_utils.latlng_to_rowcol(
                latitudes_deg=list_of_latitude_vertex_arrays_deg[i],
                longitudes_deg=list_of_longitude_vertex_arrays_deg[i],
                nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
                nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
                lat_spacing_deg=GRID_LAT_SPACING_DEG,
                lng_spacing_deg=GRID_LNG_SPACING_DEG))

        these_vertex_rows, these_vertex_columns = (
            polygons.fix_probsevere_vertices(
                row_indices_orig=these_vertex_rows,
                column_indices_orig=these_vertex_columns))

        these_vertex_latitudes_deg, these_vertex_longitudes_deg = (
            radar_utils.rowcol_to_latlng(
                grid_rows=these_vertex_rows,
                grid_columns=these_vertex_columns,
                nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
                nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
                lat_spacing_deg=GRID_LAT_SPACING_DEG,
                lng_spacing_deg=GRID_LNG_SPACING_DEG))

        (storm_object_table[tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
         storm_object_table[tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i]
         ) = polygons.simple_polygon_to_grid_points(
             vertex_row_indices=these_vertex_rows,
             vertex_column_indices=these_vertex_columns)

        (storm_object_table[tracking_utils.GRID_POINT_LAT_COLUMN].values[i],
         storm_object_table[tracking_utils.GRID_POINT_LNG_COLUMN].values[i]
         ) = radar_utils.rowcol_to_latlng(
             grid_rows=storm_object_table[
                 tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
             grid_columns=storm_object_table[
                 tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i],
             nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
             nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
             lat_spacing_deg=GRID_LAT_SPACING_DEG,
             lng_spacing_deg=GRID_LNG_SPACING_DEG)

        (storm_object_table[tracking_utils.CENTROID_LAT_COLUMN].values[i],
         storm_object_table[tracking_utils.CENTROID_LNG_COLUMN].values[i]
         ) = geodetic_utils.get_latlng_centroid(
             latitudes_deg=these_vertex_latitudes_deg,
             longitudes_deg=these_vertex_longitudes_deg)

        storm_object_table[tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN].values[
            i] = polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_columns,
                exterior_y_coords=these_vertex_rows)

        storm_object_table[tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN].values[
            i] = polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_longitudes_deg,
                exterior_y_coords=these_vertex_latitudes_deg)

    return storm_object_table
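
A minimal usage sketch for read_raw_file. The file path and its time-encoding name are hypothetical (the name must match whatever format raw_file_name_to_time expects, which is not shown in this snippet), and the surrounding modules such as tracking_utils must already be importable.

# Hypothetical ProbSevere ASCII file for one time step.
storm_object_table = read_raw_file('/data/probsevere/20170202-121500.ascii')

# One row per storm object; the grid-point columns hold numpy arrays and the
# polygon columns hold shapely Polygon objects.
print(storm_object_table[[
    tracking_utils.STORM_ID_COLUMN,
    tracking_utils.EAST_VELOCITY_COLUMN,
    tracking_utils.NORTH_VELOCITY_COLUMN
]].head())
print(storm_object_table[tracking_utils.GRID_POINT_ROW_COLUMN].values[0])
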
Example #3
def read_polygons_from_netcdf(netcdf_file_name,
                              metadata_dict=None,
                              spc_date_unix_sec=None,
                              tracking_start_time_unix_sec=None,
                              tracking_end_time_unix_sec=None,
                              raise_error_if_fails=True):
    """Reads storm polygons (outlines of storm cells) from NetCDF file.

    P = number of grid points in storm cell (different for each storm cell)
    V = number of vertices in storm polygon (different for each storm cell)

    If file cannot be opened, returns None.

    :param netcdf_file_name: Path to input file.
    :param metadata_dict: Dictionary with metadata for NetCDF file, created by
        `radar_io.read_metadata_from_raw_file`.
    :param spc_date_unix_sec: SPC date in Unix format.
    :param tracking_start_time_unix_sec: Start time for tracking period.  This
        can be found by `get_start_end_times_for_spc_date`.
    :param tracking_end_time_unix_sec: End time for tracking period.  This can
        be found by `get_start_end_times_for_spc_date`.
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be
        opened, this method will raise an error.
    :return: polygon_table: If file cannot be opened and raise_error_if_fails =
        False, this is None.  Otherwise, it is a pandas DataFrame with the
        following columns.
    polygon_table.storm_id: String ID for storm cell.
    polygon_table.unix_time_sec: Time in Unix format.
    polygon_table.spc_date_unix_sec: SPC date in Unix format.
    polygon_table.tracking_start_time_unix_sec: Start time for tracking period.
    polygon_table.tracking_end_time_unix_sec: End time for tracking period.
    polygon_table.centroid_lat_deg: Latitude at centroid of storm cell (deg N).
    polygon_table.centroid_lng_deg: Longitude at centroid of storm cell (deg E).
    polygon_table.grid_point_latitudes_deg: length-P numpy array with latitudes
        (deg N) of grid points in storm cell.
    polygon_table.grid_point_longitudes_deg: length-P numpy array with
        longitudes (deg E) of grid points in storm cell.
    polygon_table.grid_point_rows: length-P numpy array with row indices (all
        integers) of grid points in storm cell.
    polygon_table.grid_point_columns: length-P numpy array with column indices
        (all integers) of grid points in storm cell.
    polygon_table.polygon_object_latlng: Instance of `shapely.geometry.Polygon`
        with vertices in lat-long coordinates.
    polygon_table.polygon_object_rowcol: Instance of `shapely.geometry.Polygon`
        with vertices in row-column coordinates.
    """

    error_checking.assert_file_exists(netcdf_file_name)
    error_checking.assert_is_integer(spc_date_unix_sec)
    error_checking.assert_is_not_nan(spc_date_unix_sec)
    error_checking.assert_is_integer(tracking_start_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_start_time_unix_sec)
    error_checking.assert_is_integer(tracking_end_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_end_time_unix_sec)

    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)
    if netcdf_dataset is None:
        return None

    storm_id_var_name = metadata_dict[radar_io.FIELD_NAME_COLUMN]
    storm_id_var_name_orig = metadata_dict[radar_io.FIELD_NAME_COLUMN_ORIG]
    num_values = len(netcdf_dataset.variables[radar_io.GRID_ROW_COLUMN_ORIG])

    if num_values == 0:
        sparse_grid_dict = {
            radar_io.GRID_ROW_COLUMN: numpy.array([], dtype=int),
            radar_io.GRID_COLUMN_COLUMN: numpy.array([], dtype=int),
            radar_io.NUM_GRID_CELL_COLUMN: numpy.array([], dtype=int),
            storm_id_var_name: numpy.array([], dtype=int)
        }
    else:
        sparse_grid_dict = {
            radar_io.GRID_ROW_COLUMN:
            netcdf_dataset.variables[radar_io.GRID_ROW_COLUMN_ORIG][:],
            radar_io.GRID_COLUMN_COLUMN:
            netcdf_dataset.variables[radar_io.GRID_COLUMN_COLUMN_ORIG][:],
            radar_io.NUM_GRID_CELL_COLUMN:
            netcdf_dataset.variables[radar_io.NUM_GRID_CELL_COLUMN_ORIG][:],
            storm_id_var_name:
            netcdf_dataset.variables[storm_id_var_name_orig][:]
        }

    netcdf_dataset.close()
    sparse_grid_table = pandas.DataFrame.from_dict(sparse_grid_dict)
    numeric_storm_id_matrix, _, _ = (radar_s2f.sparse_to_full_grid(
        sparse_grid_table, metadata_dict))
    polygon_table = _storm_id_matrix_to_coord_lists(numeric_storm_id_matrix)

    num_storms = len(polygon_table.index)
    unix_times_sec = numpy.full(num_storms,
                                metadata_dict[radar_io.UNIX_TIME_COLUMN],
                                dtype=int)
    spc_dates_unix_sec = numpy.full(num_storms, spc_date_unix_sec, dtype=int)
    tracking_start_times_unix_sec = numpy.full(num_storms,
                                               tracking_start_time_unix_sec,
                                               dtype=int)
    tracking_end_times_unix_sec = numpy.full(num_storms,
                                             tracking_end_time_unix_sec,
                                             dtype=int)

    spc_date_string = time_conversion.time_to_spc_date_string(
        spc_date_unix_sec)
    storm_ids = _append_spc_date_to_storm_ids(
        polygon_table[tracking_io.STORM_ID_COLUMN].values, spc_date_string)

    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
    nested_array = polygon_table[[
        tracking_io.STORM_ID_COLUMN, tracking_io.STORM_ID_COLUMN
    ]].values.tolist()

    argument_dict = {
        tracking_io.STORM_ID_COLUMN: storm_ids,
        tracking_io.TIME_COLUMN: unix_times_sec,
        tracking_io.SPC_DATE_COLUMN: spc_dates_unix_sec,
        tracking_io.TRACKING_START_TIME_COLUMN: tracking_start_times_unix_sec,
        tracking_io.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec,
        tracking_io.CENTROID_LAT_COLUMN: simple_array,
        tracking_io.CENTROID_LNG_COLUMN: simple_array,
        tracking_io.GRID_POINT_LAT_COLUMN: nested_array,
        tracking_io.GRID_POINT_LNG_COLUMN: nested_array,
        tracking_io.POLYGON_OBJECT_LATLNG_COLUMN: object_array,
        tracking_io.POLYGON_OBJECT_ROWCOL_COLUMN: object_array
    }
    polygon_table = polygon_table.assign(**argument_dict)

    for i in range(num_storms):
        these_vertex_rows, these_vertex_columns = (
            polygons.grid_points_in_poly_to_vertices(
                polygon_table[tracking_io.GRID_POINT_ROW_COLUMN].values[i],
                polygon_table[tracking_io.GRID_POINT_COLUMN_COLUMN].values[i]))

        (polygon_table[tracking_io.GRID_POINT_ROW_COLUMN].values[i],
         polygon_table[tracking_io.GRID_POINT_COLUMN_COLUMN].values[i]) = (
             polygons.simple_polygon_to_grid_points(these_vertex_rows,
                                                    these_vertex_columns))

        (polygon_table[tracking_io.GRID_POINT_LAT_COLUMN].values[i],
         polygon_table[tracking_io.GRID_POINT_LNG_COLUMN].values[i]) = (
             radar_io.rowcol_to_latlng(
                 polygon_table[tracking_io.GRID_POINT_ROW_COLUMN].values[i],
                 polygon_table[tracking_io.GRID_POINT_COLUMN_COLUMN].values[i],
                 nw_grid_point_lat_deg=metadata_dict[
                     radar_io.NW_GRID_POINT_LAT_COLUMN],
                 nw_grid_point_lng_deg=metadata_dict[
                     radar_io.NW_GRID_POINT_LNG_COLUMN],
                 lat_spacing_deg=metadata_dict[radar_io.LAT_SPACING_COLUMN],
                 lng_spacing_deg=metadata_dict[radar_io.LNG_SPACING_COLUMN]))

        these_vertex_lat_deg, these_vertex_lng_deg = radar_io.rowcol_to_latlng(
            these_vertex_rows,
            these_vertex_columns,
            nw_grid_point_lat_deg=metadata_dict[
                radar_io.NW_GRID_POINT_LAT_COLUMN],
            nw_grid_point_lng_deg=metadata_dict[
                radar_io.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_io.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_io.LNG_SPACING_COLUMN])

        (polygon_table[tracking_io.CENTROID_LAT_COLUMN].values[i],
         polygon_table[tracking_io.CENTROID_LNG_COLUMN].values[i]) = (
             polygons.get_latlng_centroid(these_vertex_lat_deg,
                                          these_vertex_lng_deg))

        polygon_table[tracking_io.POLYGON_OBJECT_ROWCOL_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(these_vertex_columns,
                                                     these_vertex_rows))
        polygon_table[tracking_io.POLYGON_OBJECT_LATLNG_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(these_vertex_lng_deg,
                                                     these_vertex_lat_deg))

    return polygon_table
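
A minimal usage sketch for this version of read_polygons_from_netcdf. The path and times are hypothetical, and metadata_dict is assumed to come from radar_io.read_metadata_from_raw_file as the docstring states; that call's exact signature is not shown here.

# metadata_dict is produced by radar_io.read_metadata_from_raw_file (see the
# docstring above); its exact call is omitted here.
polygon_table = read_polygons_from_netcdf(
    '/data/segmotion/20110520/storm_id_0000.netcdf',   # hypothetical path
    metadata_dict=metadata_dict,
    spc_date_unix_sec=1305849600,                      # 2011-05-20, hypothetical
    tracking_start_time_unix_sec=1305849600,
    tracking_end_time_unix_sec=1305936000,
    raise_error_if_fails=False)

if polygon_table is None:
    print('NetCDF file could not be opened.')
else:
    print(polygon_table[tracking_io.CENTROID_LAT_COLUMN].values)
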
Example #4
def read_storm_objects_from_raw_file(json_file_name):
    """Reads storm objects from raw file.

    This file should contain all storm objects for one tracking scale and one
    time step.

    P = number of grid points in given storm object
    V = number of vertices in bounding polygon of given storm object

    :param json_file_name: Path to input file.
    :return: storm_object_table: pandas DataFrame with the following columns.
    storm_object_table.storm_id: String ID for storm cell.
    storm_object_table.unix_time_sec: Time in Unix format.
    storm_object_table.spc_date_unix_sec: SPC date in Unix format.
    storm_object_table.tracking_start_time_unix_sec: Start time for tracking
        period.
    storm_object_table.tracking_end_time_unix_sec: End time for tracking
        period.
    storm_object_table.east_velocity_m_s01: Eastward velocity (m/s).
    storm_object_table.north_velocity_m_s01: Northward velocity (m/s).
    storm_object_table.age_sec: Age of storm cell (seconds).
    storm_object_table.centroid_lat_deg: Latitude at centroid of storm object
        (deg N).
    storm_object_table.centroid_lng_deg: Longitude at centroid of storm object
        (deg E).
    storm_object_table.grid_point_latitudes_deg: length-P numpy array with
        latitudes (deg N) of grid points in storm object.
    storm_object_table.grid_point_longitudes_deg: length-P numpy array with
        longitudes (deg E) of grid points in storm object.
    storm_object_table.grid_point_rows: length-P numpy array with row indices
        (integers) of grid points in storm object.
    storm_object_table.grid_point_columns: length-P numpy array with column
        indices (integers) of grid points in storm object.
    storm_object_table.polygon_object_latlng: Instance of
        `shapely.geometry.Polygon` with vertices in lat-long coordinates.
    storm_object_table.polygon_object_rowcol: Instance of
        `shapely.geometry.Polygon` with vertices in row-column coordinates.
    """

    error_checking.assert_file_exists(json_file_name)
    with open(json_file_name) as json_file_handle:
        probsevere_dict = json.load(json_file_handle)

    unix_time_sec = time_conversion.string_to_unix_sec(
        str(probsevere_dict[TIME_COLUMN_ORIG]), TIME_FORMAT_IN_RAW_FILES)
    spc_date_unix_sec = time_conversion.time_to_spc_date_unix_sec(unix_time_sec)

    num_storms = len(probsevere_dict[FEATURES_COLUMN_ORIG])
    unix_times_sec = numpy.full(num_storms, unix_time_sec, dtype=int)
    spc_dates_unix_sec = numpy.full(num_storms, spc_date_unix_sec, dtype=int)
    tracking_start_times_unix_sec = numpy.full(
        num_storms, TRACKING_START_TIME_UNIX_SEC, dtype=int)
    tracking_end_times_unix_sec = numpy.full(
        num_storms, TRACKING_END_TIME_UNIX_SEC, dtype=int)

    storm_ids = [None] * num_storms
    east_velocities_m_s01 = numpy.full(num_storms, numpy.nan)
    north_velocities_m_s01 = numpy.full(num_storms, numpy.nan)

    for i in range(num_storms):
        storm_ids[i] = str(
            probsevere_dict[FEATURES_COLUMN_ORIG][i][PROPERTIES_COLUMN_ORIG][
                STORM_ID_COLUMN_ORIG])
        east_velocities_m_s01[i] = float(
            probsevere_dict[FEATURES_COLUMN_ORIG][i][PROPERTIES_COLUMN_ORIG][
                EAST_VELOCITY_COLUMN_ORIG])
        north_velocities_m_s01[i] = -1 * float(
            probsevere_dict[FEATURES_COLUMN_ORIG][i][PROPERTIES_COLUMN_ORIG][
                NORTH_VELOCITY_COLUMN_ORIG])

    storm_object_dict = {
        tracking_io.STORM_ID_COLUMN: storm_ids,
        tracking_io.EAST_VELOCITY_COLUMN: east_velocities_m_s01,
        tracking_io.NORTH_VELOCITY_COLUMN: north_velocities_m_s01,
        tracking_io.TIME_COLUMN: unix_times_sec,
        tracking_io.SPC_DATE_COLUMN: spc_dates_unix_sec,
        tracking_io.TRACKING_START_TIME_COLUMN: tracking_start_times_unix_sec,
        tracking_io.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec}
    storm_object_table = pandas.DataFrame.from_dict(storm_object_dict)
    storm_object_table = tracking_io.remove_rows_with_nan(storm_object_table)

    num_storms = len(storm_object_table.index)
    storm_ages_sec = numpy.full(num_storms, numpy.nan)

    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
    nested_array = storm_object_table[[
        tracking_io.STORM_ID_COLUMN,
        tracking_io.STORM_ID_COLUMN]].values.tolist()

    argument_dict = {tracking_io.AGE_COLUMN: storm_ages_sec,
                     tracking_io.CENTROID_LAT_COLUMN: simple_array,
                     tracking_io.CENTROID_LNG_COLUMN: simple_array,
                     tracking_io.GRID_POINT_LAT_COLUMN: nested_array,
                     tracking_io.GRID_POINT_LNG_COLUMN: nested_array,
                     tracking_io.GRID_POINT_ROW_COLUMN: nested_array,
                     tracking_io.GRID_POINT_COLUMN_COLUMN: nested_array,
                     tracking_io.POLYGON_OBJECT_LATLNG_COLUMN: object_array,
                     tracking_io.POLYGON_OBJECT_ROWCOL_COLUMN: object_array}
    storm_object_table = storm_object_table.assign(**argument_dict)

    for i in range(num_storms):
        this_vertex_matrix_deg = numpy.asarray(
            probsevere_dict[FEATURES_COLUMN_ORIG][i][GEOMETRY_COLUMN_ORIG][
                COORDINATES_COLUMN_ORIG][0])
        these_vertex_lat_deg = this_vertex_matrix_deg[:, LAT_COLUMN_INDEX_ORIG]
        these_vertex_lng_deg = this_vertex_matrix_deg[:, LNG_COLUMN_INDEX_ORIG]

        (these_vertex_rows, these_vertex_columns) = radar_io.latlng_to_rowcol(
            these_vertex_lat_deg, these_vertex_lng_deg,
            nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
            nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
            lat_spacing_deg=GRID_LAT_SPACING_DEG,
            lng_spacing_deg=GRID_LNG_SPACING_DEG)

        these_vertex_rows, these_vertex_columns = (
            polygons.fix_probsevere_vertices(
                these_vertex_rows, these_vertex_columns))

        these_vertex_lat_deg, these_vertex_lng_deg = radar_io.rowcol_to_latlng(
            these_vertex_rows, these_vertex_columns,
            nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
            nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
            lat_spacing_deg=GRID_LAT_SPACING_DEG,
            lng_spacing_deg=GRID_LNG_SPACING_DEG)

        (storm_object_table[tracking_io.GRID_POINT_ROW_COLUMN].values[i],
         storm_object_table[tracking_io.GRID_POINT_COLUMN_COLUMN].values[i]) = (
             polygons.simple_polygon_to_grid_points(
                 these_vertex_rows, these_vertex_columns))

        (storm_object_table[tracking_io.GRID_POINT_LAT_COLUMN].values[i],
         storm_object_table[tracking_io.GRID_POINT_LNG_COLUMN].values[i]) = (
             radar_io.rowcol_to_latlng(
                 storm_object_table[tracking_io.GRID_POINT_ROW_COLUMN].values[i],
                 storm_object_table[
                     tracking_io.GRID_POINT_COLUMN_COLUMN].values[i],
                 nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
                 nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
                 lat_spacing_deg=GRID_LAT_SPACING_DEG,
                 lng_spacing_deg=GRID_LNG_SPACING_DEG))

        (storm_object_table[tracking_io.CENTROID_LAT_COLUMN].values[i],
         storm_object_table[tracking_io.CENTROID_LNG_COLUMN].values[i]) = (
             polygons.get_latlng_centroid(
                 these_vertex_lat_deg, these_vertex_lng_deg))

        storm_object_table[
            tracking_io.POLYGON_OBJECT_ROWCOL_COLUMN].values[i] = (
                polygons.vertex_arrays_to_polygon_object(
                    these_vertex_columns, these_vertex_rows))
        storm_object_table[
            tracking_io.POLYGON_OBJECT_LATLNG_COLUMN].values[i] = (
                polygons.vertex_arrays_to_polygon_object(
                    these_vertex_lng_deg, these_vertex_lat_deg))

    return storm_object_table
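
A minimal usage sketch for read_storm_objects_from_raw_file; the file path is hypothetical, and the tracking_io constants are the ones already used in the function above.

storm_object_table = read_storm_objects_from_raw_file(
    '/data/probsevere/SSEC_AWIPS_PROBSEVERE_20170202_121500.json')  # hypothetical

print(len(storm_object_table.index))  # number of storm objects at this time
print(storm_object_table[tracking_io.POLYGON_OBJECT_LATLNG_COLUMN].values[0])
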
Example #5
def read_polygons_from_netcdf(netcdf_file_name,
                              metadata_dict,
                              spc_date_string,
                              tracking_start_time_unix_sec,
                              tracking_end_time_unix_sec,
                              raise_error_if_fails=True):
    """Reads storm polygons (outlines of storm cells) from NetCDF file.

    P = number of grid points in storm cell (different for each storm cell)
    V = number of vertices in storm polygon (different for each storm cell)

    If file cannot be opened, returns None.

    :param netcdf_file_name: Path to input file.
    :param metadata_dict: Dictionary with metadata for NetCDF file, created by
        `myrorss_and_mrms_io.read_metadata_from_raw_file`.
    :param spc_date_string: SPC date (format "yyyymmdd").
    :param tracking_start_time_unix_sec: Start time for tracking period.  This
        can be found by `get_start_end_times_for_spc_date`.
    :param tracking_end_time_unix_sec: End time for tracking period.  This can
        be found by `get_start_end_times_for_spc_date`.
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be
        opened, this method will raise an error.
    :return: polygon_table: pandas DataFrame with the following columns.  Each
        row is one storm object.
    polygon_table.primary_id_string: See documentation for
        `storm_tracking_io.write_file`.
    polygon_table.valid_time_unix_sec: Same.
    polygon_table.spc_date_string: Same.
    polygon_table.tracking_start_time_unix_sec: Same.
    polygon_table.tracking_end_time_unix_sec: Same.
    polygon_table.centroid_latitude_deg: Same.
    polygon_table.centroid_longitude_deg: Same.
    polygon_table.grid_point_latitudes_deg: Same.
    polygon_table.grid_point_longitudes_deg: Same.
    polygon_table.grid_point_rows: Same.
    polygon_table.grid_point_columns: Same.
    polygon_table.polygon_object_latlng_deg: Same.
    polygon_table.polygon_object_rowcol: Same.
    """

    error_checking.assert_file_exists(netcdf_file_name)
    error_checking.assert_is_integer(tracking_start_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_start_time_unix_sec)
    error_checking.assert_is_integer(tracking_end_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_end_time_unix_sec)

    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)
    if netcdf_dataset is None:
        return None

    storm_id_column = metadata_dict[radar_utils.FIELD_NAME_COLUMN]
    storm_id_column_orig = metadata_dict[
        myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG]
    num_values = len(
        netcdf_dataset.variables[myrorss_and_mrms_io.GRID_ROW_COLUMN_ORIG])

    if num_values == 0:
        sparse_grid_dict = {
            myrorss_and_mrms_io.GRID_ROW_COLUMN: numpy.array([], dtype=int),
            myrorss_and_mrms_io.GRID_COLUMN_COLUMN: numpy.array([], dtype=int),
            myrorss_and_mrms_io.NUM_GRID_CELL_COLUMN: numpy.array([],
                                                                  dtype=int),
            storm_id_column: numpy.array([], dtype=int)
        }
    else:
        sparse_grid_dict = {
            myrorss_and_mrms_io.GRID_ROW_COLUMN:
            netcdf_dataset.variables[myrorss_and_mrms_io.GRID_ROW_COLUMN_ORIG]
            [:],
            myrorss_and_mrms_io.GRID_COLUMN_COLUMN:
            netcdf_dataset.variables[
                myrorss_and_mrms_io.GRID_COLUMN_COLUMN_ORIG][:],
            myrorss_and_mrms_io.NUM_GRID_CELL_COLUMN:
            netcdf_dataset.variables[
                myrorss_and_mrms_io.NUM_GRID_CELL_COLUMN_ORIG][:],
            storm_id_column:
            netcdf_dataset.variables[storm_id_column_orig][:]
        }

    netcdf_dataset.close()

    sparse_grid_table = pandas.DataFrame.from_dict(sparse_grid_dict)
    numeric_id_matrix = radar_s2f.sparse_to_full_grid(sparse_grid_table,
                                                      metadata_dict)[0]

    polygon_table = _id_matrix_to_coord_lists(numeric_id_matrix)
    num_storms = len(polygon_table.index)

    valid_times_unix_sec = numpy.full(
        num_storms, metadata_dict[radar_utils.UNIX_TIME_COLUMN], dtype=int)
    spc_date_strings = num_storms * [
        time_conversion.time_to_spc_date_string(valid_times_unix_sec[0])
    ]

    tracking_start_times_unix_sec = numpy.full(num_storms,
                                               tracking_start_time_unix_sec,
                                               dtype=int)
    tracking_end_times_unix_sec = numpy.full(num_storms,
                                             tracking_end_time_unix_sec,
                                             dtype=int)

    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
    nested_array = polygon_table[[
        tracking_utils.PRIMARY_ID_COLUMN, tracking_utils.PRIMARY_ID_COLUMN
    ]].values.tolist()

    argument_dict = {
        tracking_utils.VALID_TIME_COLUMN: valid_times_unix_sec,
        tracking_utils.SPC_DATE_COLUMN: spc_date_strings,
        tracking_utils.TRACKING_START_TIME_COLUMN:
        tracking_start_times_unix_sec,
        tracking_utils.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec,
        tracking_utils.CENTROID_LATITUDE_COLUMN: simple_array,
        tracking_utils.CENTROID_LONGITUDE_COLUMN: simple_array,
        tracking_utils.LATITUDES_IN_STORM_COLUMN: nested_array,
        tracking_utils.LONGITUDES_IN_STORM_COLUMN: nested_array,
        tracking_utils.LATLNG_POLYGON_COLUMN: object_array,
        tracking_utils.ROWCOL_POLYGON_COLUMN: object_array
    }

    polygon_table = polygon_table.assign(**argument_dict)

    for i in range(num_storms):
        these_vertex_rows, these_vertex_columns = (
            polygons.grid_points_in_poly_to_vertices(
                grid_point_row_indices=polygon_table[
                    tracking_utils.ROWS_IN_STORM_COLUMN].values[i],
                grid_point_column_indices=polygon_table[
                    tracking_utils.COLUMNS_IN_STORM_COLUMN].values[i]))

        (polygon_table[tracking_utils.ROWS_IN_STORM_COLUMN].values[i],
         polygon_table[tracking_utils.COLUMNS_IN_STORM_COLUMN].values[i]
         ) = polygons.simple_polygon_to_grid_points(
             vertex_row_indices=these_vertex_rows,
             vertex_column_indices=these_vertex_columns)

        (polygon_table[tracking_utils.LATITUDES_IN_STORM_COLUMN].values[i],
         polygon_table[tracking_utils.LONGITUDES_IN_STORM_COLUMN].values[i]
         ) = radar_utils.rowcol_to_latlng(
             grid_rows=polygon_table[
                 tracking_utils.ROWS_IN_STORM_COLUMN].values[i],
             grid_columns=polygon_table[
                 tracking_utils.COLUMNS_IN_STORM_COLUMN].values[i],
             nw_grid_point_lat_deg=metadata_dict[
                 radar_utils.NW_GRID_POINT_LAT_COLUMN],
             nw_grid_point_lng_deg=metadata_dict[
                 radar_utils.NW_GRID_POINT_LNG_COLUMN],
             lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
             lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN])

        these_vertex_lat_deg, these_vertex_lng_deg = (
            radar_utils.rowcol_to_latlng(
                grid_rows=these_vertex_rows,
                grid_columns=these_vertex_columns,
                nw_grid_point_lat_deg=metadata_dict[
                    radar_utils.NW_GRID_POINT_LAT_COLUMN],
                nw_grid_point_lng_deg=metadata_dict[
                    radar_utils.NW_GRID_POINT_LNG_COLUMN],
                lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
                lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN]))

        (polygon_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values[i],
         polygon_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values[i]
         ) = geodetic_utils.get_latlng_centroid(
             latitudes_deg=these_vertex_lat_deg,
             longitudes_deg=these_vertex_lng_deg)

        polygon_table[tracking_utils.ROWCOL_POLYGON_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_columns,
                exterior_y_coords=these_vertex_rows))

        polygon_table[tracking_utils.LATLNG_POLYGON_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_lng_deg,
                exterior_y_coords=these_vertex_lat_deg))

    primary_id_strings = _append_spc_date_to_storm_ids(
        primary_id_strings=polygon_table[
            tracking_utils.PRIMARY_ID_COLUMN].values,
        spc_date_string=spc_date_string)

    return polygon_table.assign(
        **{tracking_utils.PRIMARY_ID_COLUMN: primary_id_strings})
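
A minimal usage sketch for this later version of read_polygons_from_netcdf. The path and tracking period are hypothetical, and metadata_dict is assumed to come from myrorss_and_mrms_io.read_metadata_from_raw_file as the docstring states; that call's exact signature is not shown here.

# metadata_dict is produced by myrorss_and_mrms_io.read_metadata_from_raw_file
# (see the docstring above); its exact call is omitted here.
polygon_table = read_polygons_from_netcdf(
    netcdf_file_name='/data/segmotion/20110520/storm_id_0000.netcdf',  # hypothetical
    metadata_dict=metadata_dict,
    spc_date_string='20110520',
    tracking_start_time_unix_sec=1305849600,   # hypothetical tracking period
    tracking_end_time_unix_sec=1305936000,
    raise_error_if_fails=False)

if polygon_table is not None:
    print(polygon_table[tracking_utils.PRIMARY_ID_COLUMN].values[:5])
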