def test_get_latlng_centroid(self):
    """Verifies that get_latlng_centroid returns the expected centroid."""

    computed_lat_deg, computed_lng_deg = geodetic_utils.get_latlng_centroid(
        latitudes_deg=POINT_LATITUDES_DEG,
        longitudes_deg=POINT_LONGITUDES_DEG, allow_nan=True)

    # Compare each coordinate to the precomputed expected value within
    # DEFAULT_TOLERANCE.
    lat_is_close = numpy.isclose(
        computed_lat_deg, CENTROID_LATITUDE_DEG, atol=DEFAULT_TOLERANCE)
    lng_is_close = numpy.isclose(
        computed_lng_deg, CENTROID_LONGITUDE_DEG, atol=DEFAULT_TOLERANCE)

    self.assertTrue(lat_is_close)
    self.assertTrue(lng_is_close)
def create_distance_buffers(storm_object_table, min_distances_metres,
                            max_distances_metres):
    """Creates one or more distance buffers around each storm object.

    K = number of buffers

    :param storm_object_table: pandas DataFrame with the following columns.
        Each row is one storm object.
    storm_object_table.centroid_latitude_deg: Latitude (deg N) of storm-object
        centroid.
    storm_object_table.centroid_longitude_deg: Longitude (deg E) of storm-object
        centroid.
    storm_object_table.polygon_object_latlng_deg: Instance of
        `shapely.geometry.Polygon`, with x-coords in longitude (deg E) and
        y-coords in latitude (deg N).

    :param min_distances_metres: length-K numpy array of minimum distances.  If
        the storm object is inside the [k]th buffer -- i.e., the [k]th buffer
        has no minimum distance -- then min_distances_metres[k] should be NaN.
    :param max_distances_metres: length-K numpy array of max distances.
    :return: storm_object_table: Same as input but with K additional columns
        (one per distance buffer).  Column names are generated by
        `buffer_to_column_name`, and each value in these columns is a
        `shapely.geometry.Polygon` object, with x-coords in longitude (deg E)
        and y-coords in latitude (deg N).
    """

    # max_distances_metres must have the same length as min_distances_metres.
    num_buffers = len(min_distances_metres)
    these_expected_dim = numpy.array([num_buffers], dtype=int)
    error_checking.assert_is_numpy_array(
        max_distances_metres, exact_dimensions=these_expected_dim)

    # Centroid of all storm-object centroids, used as the projection origin so
    # that distortion is small over the domain.
    global_centroid_lat_deg, global_centroid_lng_deg = (
        geodetic_utils.get_latlng_centroid(
            latitudes_deg=storm_object_table[CENTROID_LATITUDE_COLUMN].values,
            longitudes_deg=storm_object_table[CENTROID_LONGITUDE_COLUMN].values
        ))

    projection_object = projections.init_azimuthal_equidistant_projection(
        central_latitude_deg=global_centroid_lat_deg,
        central_longitude_deg=global_centroid_lng_deg)

    num_storm_objects = len(storm_object_table.index)
    object_array = numpy.full(num_storm_objects, numpy.nan, dtype=object)

    # Create one new (initially NaN-filled) object column per buffer.
    buffer_column_names = [''] * num_buffers

    for j in range(num_buffers):
        buffer_column_names[j] = buffer_to_column_name(
            min_distance_metres=min_distances_metres[j],
            max_distance_metres=max_distances_metres[j])

        storm_object_table = storm_object_table.assign(
            **{buffer_column_names[j]: object_array})

    for i in range(num_storm_objects):
        # Project the storm outline from lat-long to x-y metres, since
        # buffering by a metric distance requires Cartesian coordinates.
        this_orig_vertex_dict_latlng_deg = (
            polygons.polygon_object_to_vertex_arrays(
                storm_object_table[LATLNG_POLYGON_COLUMN].values[i]))

        these_orig_x_metres, these_orig_y_metres = (
            projections.project_latlng_to_xy(
                latitudes_deg=this_orig_vertex_dict_latlng_deg[
                    polygons.EXTERIOR_Y_COLUMN],
                longitudes_deg=this_orig_vertex_dict_latlng_deg[
                    polygons.EXTERIOR_X_COLUMN],
                projection_object=projection_object))

        for j in range(num_buffers):
            this_buffer_poly_object_xy_metres = polygons.buffer_simple_polygon(
                vertex_x_metres=these_orig_x_metres,
                vertex_y_metres=these_orig_y_metres,
                min_buffer_dist_metres=min_distances_metres[j],
                max_buffer_dist_metres=max_distances_metres[j])

            this_buffer_vertex_dict = polygons.polygon_object_to_vertex_arrays(
                this_buffer_poly_object_xy_metres)

            # Project buffer exterior back from x-y metres to lat-long.
            (this_buffer_vertex_dict[polygons.EXTERIOR_Y_COLUMN],
             this_buffer_vertex_dict[polygons.EXTERIOR_X_COLUMN]
            ) = projections.project_xy_to_latlng(
                x_coords_metres=this_buffer_vertex_dict[
                    polygons.EXTERIOR_X_COLUMN],
                y_coords_metres=this_buffer_vertex_dict[
                    polygons.EXTERIOR_Y_COLUMN],
                projection_object=projection_object)

            # An exclusive buffer (non-NaN min distance) has a hole where the
            # storm object itself sits; project each hole back as well.
            this_num_holes = len(
                this_buffer_vertex_dict[polygons.HOLE_X_COLUMN])

            for k in range(this_num_holes):
                (this_buffer_vertex_dict[polygons.HOLE_Y_COLUMN][k],
                 this_buffer_vertex_dict[polygons.HOLE_X_COLUMN][k]
                ) = projections.project_xy_to_latlng(
                    x_coords_metres=this_buffer_vertex_dict[
                        polygons.HOLE_X_COLUMN][k],
                    y_coords_metres=this_buffer_vertex_dict[
                        polygons.HOLE_Y_COLUMN][k],
                    projection_object=projection_object)

            this_buffer_poly_object_latlng_deg = (
                polygons.vertex_arrays_to_polygon_object(
                    exterior_x_coords=this_buffer_vertex_dict[
                        polygons.EXTERIOR_X_COLUMN],
                    exterior_y_coords=this_buffer_vertex_dict[
                        polygons.EXTERIOR_Y_COLUMN],
                    hole_x_coords_list=this_buffer_vertex_dict[
                        polygons.HOLE_X_COLUMN],
                    hole_y_coords_list=this_buffer_vertex_dict[
                        polygons.HOLE_Y_COLUMN]))

            storm_object_table[buffer_column_names[j]].values[i] = (
                this_buffer_poly_object_latlng_deg)

    return storm_object_table
def read_raw_file(raw_file_name):
    """Reads tracking data from raw (either JSON or ASCII) file.

    This file should contain all storm objects at one time step.

    :param raw_file_name: Path to input file.
    :return: storm_object_table: See documentation for
        `storm_tracking_io.write_processed_file`.
    """

    error_checking.assert_file_exists(raw_file_name)
    _, pathless_file_name = os.path.split(raw_file_name)
    _, file_extension = os.path.splitext(pathless_file_name)
    _check_raw_file_extension(file_extension)

    unix_time_sec = raw_file_name_to_time(raw_file_name)

    if file_extension == ASCII_FILE_EXTENSION:
        storm_ids = []
        east_velocities_m_s01 = []
        north_velocities_m_s01 = []
        list_of_latitude_vertex_arrays_deg = []
        list_of_longitude_vertex_arrays_deg = []

        # FIX: open file in a `with` block, so the handle is always closed
        # (previously `open(...).readlines()` leaked the file handle).
        with open(raw_file_name, 'r') as ascii_file_handle:
            for this_line in ascii_file_handle:
                these_words = this_line.split(':')

                # Skip malformed/short lines.
                if len(these_words) < MIN_WORDS_PER_ASCII_LINE:
                    continue

                storm_ids.append(these_words[STORM_ID_INDEX_IN_ASCII_FILES])
                east_velocities_m_s01.append(
                    float(these_words[U_MOTION_INDEX_IN_ASCII_FILES]))

                # Sign is flipped to convert raw v-motion to northward
                # velocity (same convention as the JSON branch below).
                north_velocities_m_s01.append(
                    -1 * float(these_words[V_MOTION_INDEX_IN_ASCII_FILES]))

                # Polygon word is a comma-delimited list of alternating
                # lat/long coordinates; split into the two sequences.
                these_polygon_words = numpy.array(
                    these_words[POLYGON_INDEX_IN_ASCII_FILES].split(','))
                these_latitude_words = these_polygon_words[
                    LATITUDE_INDEX_IN_ASCII_FILES::2].tolist()
                these_longitude_words = these_polygon_words[
                    LONGITUDE_INDEX_IN_ASCII_FILES::2].tolist()

                these_latitudes_deg = numpy.array(
                    [float(w) for w in these_latitude_words])
                these_longitudes_deg = numpy.array(
                    [float(w) for w in these_longitude_words])

                list_of_latitude_vertex_arrays_deg.append(these_latitudes_deg)
                list_of_longitude_vertex_arrays_deg.append(
                    these_longitudes_deg)

        east_velocities_m_s01 = numpy.array(east_velocities_m_s01)
        north_velocities_m_s01 = numpy.array(north_velocities_m_s01)
        num_storms = len(storm_ids)
    else:
        with open(raw_file_name) as json_file_handle:
            probsevere_dict = json.load(json_file_handle)

        num_storms = len(probsevere_dict[FEATURES_KEY_IN_JSON_FILES])
        storm_ids = [None] * num_storms
        east_velocities_m_s01 = numpy.full(num_storms, numpy.nan)
        north_velocities_m_s01 = numpy.full(num_storms, numpy.nan)
        list_of_latitude_vertex_arrays_deg = [None] * num_storms
        list_of_longitude_vertex_arrays_deg = [None] * num_storms

        for i in range(num_storms):
            storm_ids[i] = str(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [PROPERTIES_KEY_IN_JSON_FILES][STORM_ID_KEY_IN_JSON_FILES])
            east_velocities_m_s01[i] = float(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [PROPERTIES_KEY_IN_JSON_FILES][U_MOTION_KEY_IN_JSON_FILES])

            # Same sign flip as in the ASCII branch.
            north_velocities_m_s01[i] = -1 * float(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [PROPERTIES_KEY_IN_JSON_FILES][V_MOTION_KEY_IN_JSON_FILES])

            # First element of the coordinates entry is the exterior ring.
            this_vertex_matrix_deg = numpy.array(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [GEOMETRY_KEY_IN_JSON_FILES][COORDINATES_KEY_IN_JSON_FILES][0])
            list_of_latitude_vertex_arrays_deg[i] = numpy.array(
                this_vertex_matrix_deg[:, LATITUDE_INDEX_IN_JSON_FILES])
            list_of_longitude_vertex_arrays_deg[i] = numpy.array(
                this_vertex_matrix_deg[:, LONGITUDE_INDEX_IN_JSON_FILES])

    # Per-storm scalar metadata (all storms share the same valid time and
    # dummy tracking period).
    spc_date_unix_sec = time_conversion.time_to_spc_date_unix_sec(
        unix_time_sec)
    unix_times_sec = numpy.full(num_storms, unix_time_sec, dtype=int)
    spc_dates_unix_sec = numpy.full(num_storms, spc_date_unix_sec, dtype=int)
    tracking_start_times_unix_sec = numpy.full(
        num_storms, DUMMY_TRACKING_START_TIME_UNIX_SEC, dtype=int)
    tracking_end_times_unix_sec = numpy.full(
        num_storms, DUMMY_TRACKING_END_TIME_UNIX_SEC, dtype=int)

    storm_object_dict = {
        tracking_utils.STORM_ID_COLUMN: storm_ids,
        tracking_utils.EAST_VELOCITY_COLUMN: east_velocities_m_s01,
        tracking_utils.NORTH_VELOCITY_COLUMN: north_velocities_m_s01,
        tracking_utils.TIME_COLUMN: unix_times_sec,
        tracking_utils.SPC_DATE_COLUMN: spc_dates_unix_sec,
        tracking_utils.TRACKING_START_TIME_COLUMN:
            tracking_start_times_unix_sec,
        tracking_utils.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec
    }
    storm_object_table = pandas.DataFrame.from_dict(storm_object_dict)

    # Placeholder columns, filled per storm in the loop below.  nested_array
    # is a list of lists, used to seed columns whose values are arrays.
    storm_ages_sec = numpy.full(num_storms, numpy.nan)
    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
    nested_array = storm_object_table[[
        tracking_utils.STORM_ID_COLUMN, tracking_utils.STORM_ID_COLUMN
    ]].values.tolist()

    argument_dict = {
        tracking_utils.AGE_COLUMN: storm_ages_sec,
        tracking_utils.CENTROID_LAT_COLUMN: simple_array,
        tracking_utils.CENTROID_LNG_COLUMN: simple_array,
        tracking_utils.GRID_POINT_LAT_COLUMN: nested_array,
        tracking_utils.GRID_POINT_LNG_COLUMN: nested_array,
        tracking_utils.GRID_POINT_ROW_COLUMN: nested_array,
        tracking_utils.GRID_POINT_COLUMN_COLUMN: nested_array,
        tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN: object_array,
        tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN: object_array
    }
    storm_object_table = storm_object_table.assign(**argument_dict)

    for i in range(num_storms):
        # Convert raw lat-long vertices to grid coordinates, then clean them
        # up with fix_probsevere_vertices.
        these_vertex_rows, these_vertex_columns = (
            radar_utils.latlng_to_rowcol(
                latitudes_deg=list_of_latitude_vertex_arrays_deg[i],
                longitudes_deg=list_of_longitude_vertex_arrays_deg[i],
                nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
                nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
                lat_spacing_deg=GRID_LAT_SPACING_DEG,
                lng_spacing_deg=GRID_LNG_SPACING_DEG))

        these_vertex_rows, these_vertex_columns = (
            polygons.fix_probsevere_vertices(
                row_indices_orig=these_vertex_rows,
                column_indices_orig=these_vertex_columns))

        these_vertex_latitudes_deg, these_vertex_longitudes_deg = (
            radar_utils.rowcol_to_latlng(
                grid_rows=these_vertex_rows,
                grid_columns=these_vertex_columns,
                nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
                nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
                lat_spacing_deg=GRID_LAT_SPACING_DEG,
                lng_spacing_deg=GRID_LNG_SPACING_DEG))

        # Grid points enclosed by the polygon, in row-column space...
        (storm_object_table[tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
         storm_object_table[tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i]
        ) = polygons.simple_polygon_to_grid_points(
            vertex_row_indices=these_vertex_rows,
            vertex_column_indices=these_vertex_columns)

        # ...and the same grid points in lat-long space.
        (storm_object_table[tracking_utils.GRID_POINT_LAT_COLUMN].values[i],
         storm_object_table[tracking_utils.GRID_POINT_LNG_COLUMN].values[i]
        ) = radar_utils.rowcol_to_latlng(
            grid_rows=storm_object_table[
                tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
            grid_columns=storm_object_table[
                tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i],
            nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
            nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
            lat_spacing_deg=GRID_LAT_SPACING_DEG,
            lng_spacing_deg=GRID_LNG_SPACING_DEG)

        (storm_object_table[tracking_utils.CENTROID_LAT_COLUMN].values[i],
         storm_object_table[tracking_utils.CENTROID_LNG_COLUMN].values[i]
        ) = geodetic_utils.get_latlng_centroid(
            latitudes_deg=these_vertex_latitudes_deg,
            longitudes_deg=these_vertex_longitudes_deg)

        storm_object_table[tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN].values[
            i] = polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_columns,
                exterior_y_coords=these_vertex_rows)
        storm_object_table[tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN].values[
            i] = polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_longitudes_deg,
                exterior_y_coords=these_vertex_latitudes_deg)

    return storm_object_table
# NOTE(review): this span references index `i` and the arrays
# GRID_POINT_ROWS_BY_STORM / GRID_POINT_COLUMNS_BY_STORM, which are defined
# outside the visible portion of the file -- presumably this is the body of a
# per-storm loop building test fixtures.  Confirm against the surrounding code.

# Vertices of the [i]th storm polygon, in row-column grid coordinates.
THESE_VERTEX_ROWS, THESE_VERTEX_COLUMNS = (
    polygons.grid_points_in_poly_to_vertices(
        GRID_POINT_ROWS_BY_STORM[i], GRID_POINT_COLUMNS_BY_STORM[i]))

# Same vertices converted to lat-long.
THESE_VERTEX_LATITUDES_DEG, THESE_VERTEX_LONGITUDES_DEG = (
    radar_utils.rowcol_to_latlng(
        THESE_VERTEX_ROWS, THESE_VERTEX_COLUMNS,
        nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
        nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
        lat_spacing_deg=LATITUDE_SPACING_DEG,
        lng_spacing_deg=LONGITUDE_SPACING_DEG))

(THIS_CENTROID_LAT_DEG,
 THIS_CENTROID_LNG_DEG) = geodetic_utils.get_latlng_centroid(
     latitudes_deg=THESE_VERTEX_LATITUDES_DEG,
     longitudes_deg=THESE_VERTEX_LONGITUDES_DEG)

# Write centroid and both polygon representations (lat-long and row-column)
# into the [i]th row of the table.
STORM_OBJECT_TABLE_SMALL_SCALE[
    tracking_utils.CENTROID_LAT_COLUMN].values[i] = THIS_CENTROID_LAT_DEG
STORM_OBJECT_TABLE_SMALL_SCALE[
    tracking_utils.CENTROID_LNG_COLUMN].values[i] = THIS_CENTROID_LNG_DEG
STORM_OBJECT_TABLE_SMALL_SCALE[
    tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN].values[i] = (
        polygons.vertex_arrays_to_polygon_object(
            THESE_VERTEX_LONGITUDES_DEG, THESE_VERTEX_LATITUDES_DEG))
STORM_OBJECT_TABLE_SMALL_SCALE[
    tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN].values[i] = (
        polygons.vertex_arrays_to_polygon_object(THESE_VERTEX_COLUMNS,
                                                 THESE_VERTEX_ROWS))
def read_polygons_from_netcdf(netcdf_file_name, metadata_dict, spc_date_string,
                              tracking_start_time_unix_sec,
                              tracking_end_time_unix_sec,
                              raise_error_if_fails=True):
    """Reads storm polygons (outlines of storm cells) from NetCDF file.

    P = number of grid points in storm cell (different for each storm cell)
    V = number of vertices in storm polygon (different for each storm cell)

    If file cannot be opened, returns None.

    :param netcdf_file_name: Path to input file.
    :param metadata_dict: Dictionary with metadata for NetCDF file, created by
        `myrorss_and_mrms_io.read_metadata_from_raw_file`.
    :param spc_date_string: SPC date (format "yyyymmdd").
    :param tracking_start_time_unix_sec: Start time for tracking period.  This
        can be found by `get_start_end_times_for_spc_date`.
    :param tracking_end_time_unix_sec: End time for tracking period.  This can
        be found by `get_start_end_times_for_spc_date`.
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be
        opened, this method will raise an error.
    :return: polygon_table: If file cannot be opened and raise_error_if_fails =
        False, this is None.  Otherwise, it is a pandas DataFrame with the
        following columns.
    polygon_table.storm_id: String ID for storm cell.
    polygon_table.unix_time_sec: Time in Unix format.
    polygon_table.spc_date_unix_sec: SPC date in Unix format.
    polygon_table.tracking_start_time_unix_sec: Start time for tracking period.
    polygon_table.tracking_end_time_unix_sec: End time for tracking period.
    polygon_table.centroid_lat_deg: Latitude at centroid of storm cell (deg N).
    polygon_table.centroid_lng_deg: Longitude at centroid of storm cell (deg E).
    polygon_table.grid_point_latitudes_deg: length-P numpy array with latitudes
        (deg N) of grid points in storm cell.
    polygon_table.grid_point_longitudes_deg: length-P numpy array with
        longitudes (deg E) of grid points in storm cell.
    polygon_table.grid_point_rows: length-P numpy array with row indices (all
        integers) of grid points in storm cell.
    polygon_table.grid_point_columns: length-P numpy array with column indices
        (all integers) of grid points in storm cell.
    polygon_table.polygon_object_latlng: Instance of
        `shapely.geometry.Polygon` with vertices in lat-long coordinates.
    polygon_table.polygon_object_rowcol: Instance of
        `shapely.geometry.Polygon` with vertices in row-column coordinates.
    """

    error_checking.assert_file_exists(netcdf_file_name)
    error_checking.assert_is_integer(tracking_start_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_start_time_unix_sec)
    error_checking.assert_is_integer(tracking_end_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_end_time_unix_sec)

    # Returns None (rather than raising) when raise_error_if_fails is False
    # and the file cannot be opened.
    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)
    if netcdf_dataset is None:
        return None

    storm_id_var_name = metadata_dict[radar_utils.FIELD_NAME_COLUMN]
    storm_id_var_name_orig = metadata_dict[
        myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG]
    num_values = len(
        netcdf_dataset.variables[myrorss_and_mrms_io.GRID_ROW_COLUMN_ORIG])

    # Empty file: build an empty sparse grid so downstream code still works.
    if num_values == 0:
        sparse_grid_dict = {
            myrorss_and_mrms_io.GRID_ROW_COLUMN: numpy.array([], dtype=int),
            myrorss_and_mrms_io.GRID_COLUMN_COLUMN:
                numpy.array([], dtype=int),
            myrorss_and_mrms_io.NUM_GRID_CELL_COLUMN:
                numpy.array([], dtype=int),
            storm_id_var_name: numpy.array([], dtype=int)
        }
    else:
        sparse_grid_dict = {
            myrorss_and_mrms_io.GRID_ROW_COLUMN:
                netcdf_dataset.variables[
                    myrorss_and_mrms_io.GRID_ROW_COLUMN_ORIG][:],
            myrorss_and_mrms_io.GRID_COLUMN_COLUMN:
                netcdf_dataset.variables[
                    myrorss_and_mrms_io.GRID_COLUMN_COLUMN_ORIG][:],
            myrorss_and_mrms_io.NUM_GRID_CELL_COLUMN:
                netcdf_dataset.variables[
                    myrorss_and_mrms_io.NUM_GRID_CELL_COLUMN_ORIG][:],
            storm_id_var_name:
                netcdf_dataset.variables[storm_id_var_name_orig][:]
        }

    netcdf_dataset.close()
    sparse_grid_table = pandas.DataFrame.from_dict(sparse_grid_dict)

    # Expand the sparse grid to a full 2-D matrix of numeric storm IDs, then
    # convert each distinct ID to a list of grid-point coordinates.
    numeric_storm_id_matrix, _, _ = (radar_s2f.sparse_to_full_grid(
        sparse_grid_table, metadata_dict))
    polygon_table = _storm_id_matrix_to_coord_lists(numeric_storm_id_matrix)
    num_storms = len(polygon_table.index)

    unix_times_sec = numpy.full(
        num_storms, metadata_dict[radar_utils.UNIX_TIME_COLUMN], dtype=int)
    spc_date_unix_sec = time_conversion.spc_date_string_to_unix_sec(
        spc_date_string)
    spc_dates_unix_sec = numpy.full(num_storms, spc_date_unix_sec, dtype=int)
    tracking_start_times_unix_sec = numpy.full(
        num_storms, tracking_start_time_unix_sec, dtype=int)
    tracking_end_times_unix_sec = numpy.full(
        num_storms, tracking_end_time_unix_sec, dtype=int)

    # Make IDs unique across SPC dates by appending the date string.
    storm_ids = _append_spc_date_to_storm_ids(
        polygon_table[tracking_utils.STORM_ID_COLUMN].values, spc_date_string)

    # Placeholder columns, filled per storm in the loop below.
    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
    nested_array = polygon_table[[
        tracking_utils.STORM_ID_COLUMN, tracking_utils.STORM_ID_COLUMN
    ]].values.tolist()

    argument_dict = {
        tracking_utils.STORM_ID_COLUMN: storm_ids,
        tracking_utils.TIME_COLUMN: unix_times_sec,
        tracking_utils.SPC_DATE_COLUMN: spc_dates_unix_sec,
        tracking_utils.TRACKING_START_TIME_COLUMN:
            tracking_start_times_unix_sec,
        tracking_utils.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec,
        tracking_utils.CENTROID_LAT_COLUMN: simple_array,
        tracking_utils.CENTROID_LNG_COLUMN: simple_array,
        tracking_utils.GRID_POINT_LAT_COLUMN: nested_array,
        tracking_utils.GRID_POINT_LNG_COLUMN: nested_array,
        tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN: object_array,
        tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN: object_array
    }
    polygon_table = polygon_table.assign(**argument_dict)

    for i in range(num_storms):
        these_vertex_rows, these_vertex_columns = (
            polygons.grid_points_in_poly_to_vertices(
                polygon_table[tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
                polygon_table[
                    tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i]))

        # Disabled experiment (kept for reference): flipping row indices about
        # the grid's latitude dimension before/after finding vertices.
        # these_vertex_rows, these_vertex_columns = (
        #     polygons.grid_points_in_poly_to_vertices(
        #         metadata_dict[radar_utils.NUM_LAT_COLUMN] -
        #         polygon_table[tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
        #         polygon_table[
        #             tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i]))
        #
        # these_vertex_rows = (
        #     metadata_dict[radar_utils.NUM_LAT_COLUMN] - these_vertex_rows)

        (polygon_table[tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
         polygon_table[tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i]) = (
             polygons.simple_polygon_to_grid_points(these_vertex_rows,
                                                    these_vertex_columns))

        (polygon_table[tracking_utils.GRID_POINT_LAT_COLUMN].values[i],
         polygon_table[tracking_utils.GRID_POINT_LNG_COLUMN].values[i]
        ) = (radar_utils.rowcol_to_latlng(
            polygon_table[tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
            polygon_table[tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i],
            nw_grid_point_lat_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LAT_COLUMN],
            nw_grid_point_lng_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN]))

        these_vertex_lat_deg, these_vertex_lng_deg = (
            radar_utils.rowcol_to_latlng(
                these_vertex_rows, these_vertex_columns,
                nw_grid_point_lat_deg=metadata_dict[
                    radar_utils.NW_GRID_POINT_LAT_COLUMN],
                nw_grid_point_lng_deg=metadata_dict[
                    radar_utils.NW_GRID_POINT_LNG_COLUMN],
                lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
                lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN]))

        (polygon_table[tracking_utils.CENTROID_LAT_COLUMN].values[i],
         polygon_table[tracking_utils.CENTROID_LNG_COLUMN].values[i]
        ) = geodetic_utils.get_latlng_centroid(
            latitudes_deg=these_vertex_lat_deg,
            longitudes_deg=these_vertex_lng_deg)

        polygon_table[
            tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN].values[i] = (
                polygons.vertex_arrays_to_polygon_object(
                    these_vertex_columns, these_vertex_rows))
        polygon_table[
            tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN].values[i] = (
                polygons.vertex_arrays_to_polygon_object(
                    these_vertex_lng_deg, these_vertex_lat_deg))

    return polygon_table
def make_buffers_around_storm_objects(
        storm_object_table, min_distances_metres, max_distances_metres):
    """Creates one or more distance buffers around each storm object.

    N = number of storm objects
    B = number of buffers around each storm object
    V = number of vertices in a given buffer

    :param storm_object_table: N-row pandas DataFrame with the following
        columns.
    storm_object_table.storm_id: String ID for storm cell.
    storm_object_table.polygon_object_latlng: Instance of
        `shapely.geometry.Polygon`, containing vertices of storm object in
        lat-long coordinates.

    :param min_distances_metres: length-B numpy array of minimum buffer
        distances.  If min_distances_metres[i] is NaN, the storm object is
        included in the [i]th buffer, so the [i]th buffer is inclusive.  If
        min_distances_metres[i] is a real number, the storm object is *not*
        included in the [i]th buffer, so the [i]th buffer is exclusive.
    :param max_distances_metres: length-B numpy array of maximum buffer
        distances.  Must be all real numbers (no NaN).
    :return: storm_object_table: Same as input, but with B additional columns.
        Each additional column (listed below) contains a
        `shapely.geometry.Polygon` instance for each storm object.  Each
        `shapely.geometry.Polygon` instance contains the lat-long vertices of
        one distance buffer around one storm object.
    storm_object_table.polygon_object_latlng_buffer_<D>m: For an inclusive
        buffer of D metres around the storm.
    storm_object_table.polygon_object_latlng_buffer_<d>_<D>m: For an exclusive
        buffer of d...D metres around the storm.
    """

    error_checking.assert_is_geq_numpy_array(
        min_distances_metres, 0., allow_nan=True)
    error_checking.assert_is_numpy_array(
        min_distances_metres, num_dimensions=1)

    num_buffers = len(min_distances_metres)
    error_checking.assert_is_geq_numpy_array(
        max_distances_metres, 0., allow_nan=False)
    error_checking.assert_is_numpy_array(
        max_distances_metres, exact_dimensions=numpy.array([num_buffers]))

    # Each exclusive buffer must have max distance > min distance.
    for j in range(num_buffers):
        if numpy.isnan(min_distances_metres[j]):
            continue
        error_checking.assert_is_greater(
            max_distances_metres[j], min_distances_metres[j],
            allow_nan=False)

    num_storm_objects = len(storm_object_table.index)
    centroid_latitudes_deg = numpy.full(num_storm_objects, numpy.nan)
    centroid_longitudes_deg = numpy.full(num_storm_objects, numpy.nan)

    for i in range(num_storm_objects):
        # BUG FIX: index with [i], not [0].  Previously every iteration read
        # the first storm object's polygon, so the "global centroid" below was
        # just the first storm's centroid.
        this_centroid_object = storm_object_table[
            POLYGON_OBJECT_LATLNG_COLUMN].values[i].centroid
        centroid_latitudes_deg[i] = this_centroid_object.y
        centroid_longitudes_deg[i] = this_centroid_object.x

    # Centroid of all storm-object centroids, used as the projection origin so
    # that distortion is small over the domain.
    (global_centroid_lat_deg, global_centroid_lng_deg
    ) = geodetic_utils.get_latlng_centroid(
        latitudes_deg=centroid_latitudes_deg,
        longitudes_deg=centroid_longitudes_deg)

    projection_object = projections.init_azimuthal_equidistant_projection(
        global_centroid_lat_deg, global_centroid_lng_deg)

    # Create one new (initially NaN-filled) object column per buffer.
    object_array = numpy.full(num_storm_objects, numpy.nan, dtype=object)
    argument_dict = {}
    buffer_column_names = [''] * num_buffers

    for j in range(num_buffers):
        buffer_column_names[j] = distance_buffer_to_column_name(
            min_distances_metres[j], max_distances_metres[j])
        argument_dict.update({buffer_column_names[j]: object_array})

    storm_object_table = storm_object_table.assign(**argument_dict)

    for i in range(num_storm_objects):
        # Project the storm outline from lat-long to x-y metres, since
        # buffering by a metric distance requires Cartesian coordinates.
        orig_vertex_dict_latlng = polygons.polygon_object_to_vertex_arrays(
            storm_object_table[POLYGON_OBJECT_LATLNG_COLUMN].values[i])

        (orig_vertex_x_metres,
         orig_vertex_y_metres) = projections.project_latlng_to_xy(
             orig_vertex_dict_latlng[polygons.EXTERIOR_Y_COLUMN],
             orig_vertex_dict_latlng[polygons.EXTERIOR_X_COLUMN],
             projection_object=projection_object)

        for j in range(num_buffers):
            buffer_polygon_object_xy = polygons.buffer_simple_polygon(
                orig_vertex_x_metres, orig_vertex_y_metres,
                min_buffer_dist_metres=min_distances_metres[j],
                max_buffer_dist_metres=max_distances_metres[j])

            buffer_vertex_dict = polygons.polygon_object_to_vertex_arrays(
                buffer_polygon_object_xy)

            # Project buffer exterior back from x-y metres to lat-long.
            (buffer_vertex_dict[polygons.EXTERIOR_Y_COLUMN],
             buffer_vertex_dict[polygons.EXTERIOR_X_COLUMN]) = (
                 projections.project_xy_to_latlng(
                     buffer_vertex_dict[polygons.EXTERIOR_X_COLUMN],
                     buffer_vertex_dict[polygons.EXTERIOR_Y_COLUMN],
                     projection_object=projection_object))

            # An exclusive buffer has a hole where the storm object itself
            # sits; project each hole back as well.
            this_num_holes = len(buffer_vertex_dict[polygons.HOLE_X_COLUMN])

            for k in range(this_num_holes):
                (buffer_vertex_dict[polygons.HOLE_Y_COLUMN][k],
                 buffer_vertex_dict[polygons.HOLE_X_COLUMN][k]) = (
                     projections.project_xy_to_latlng(
                         buffer_vertex_dict[polygons.HOLE_X_COLUMN][k],
                         buffer_vertex_dict[polygons.HOLE_Y_COLUMN][k],
                         projection_object=projection_object))

            buffer_polygon_object_latlng = (
                polygons.vertex_arrays_to_polygon_object(
                    buffer_vertex_dict[polygons.EXTERIOR_X_COLUMN],
                    buffer_vertex_dict[polygons.EXTERIOR_Y_COLUMN],
                    hole_x_coords_list=
                    buffer_vertex_dict[polygons.HOLE_X_COLUMN],
                    hole_y_coords_list=
                    buffer_vertex_dict[polygons.HOLE_Y_COLUMN]))

            storm_object_table[buffer_column_names[j]].values[
                i] = buffer_polygon_object_latlng

    return storm_object_table
def read_polygons_from_netcdf(netcdf_file_name, metadata_dict, spc_date_string,
                              tracking_start_time_unix_sec,
                              tracking_end_time_unix_sec,
                              raise_error_if_fails=True):
    """Reads storm polygons (outlines of storm cells) from NetCDF file.

    P = number of grid points in storm cell (different for each storm cell)
    V = number of vertices in storm polygon (different for each storm cell)

    If file cannot be opened, returns None.

    :param netcdf_file_name: Path to input file.
    :param metadata_dict: Dictionary with metadata for NetCDF file, created by
        `myrorss_and_mrms_io.read_metadata_from_raw_file`.
    :param spc_date_string: SPC date (format "yyyymmdd").
    :param tracking_start_time_unix_sec: Start time for tracking period.  This
        can be found by `get_start_end_times_for_spc_date`.
    :param tracking_end_time_unix_sec: End time for tracking period.  This can
        be found by `get_start_end_times_for_spc_date`.
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be
        opened, this method will raise an error.
    :return: polygon_table: pandas DataFrame with the following columns.  Each
        row is one storm object.
    polygon_table.primary_id_string: See documentation for
        `storm_tracking_io.write_file`.
    polygon_table.valid_time_unix_sec: Same.
    polygon_table.spc_date_string: Same.
    polygon_table.tracking_start_time_unix_sec: Same.
    polygon_table.tracking_end_time_unix_sec: Same.
    polygon_table.centroid_latitude_deg: Same.
    polygon_table.centroid_longitude_deg: Same.
    polygon_table.grid_point_latitudes_deg: Same.
    polygon_table.grid_point_longitudes_deg: Same.
    polygon_table.grid_point_rows: Same.
    polygon_table.grid_point_columns: Same.
    polygon_table.polygon_object_latlng_deg: Same.
    polygon_table.polygon_object_rowcol: Same.
    """

    error_checking.assert_file_exists(netcdf_file_name)
    error_checking.assert_is_integer(tracking_start_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_start_time_unix_sec)
    error_checking.assert_is_integer(tracking_end_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_end_time_unix_sec)

    # Returns None (rather than raising) when raise_error_if_fails is False
    # and the file cannot be opened.
    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)
    if netcdf_dataset is None:
        return None

    storm_id_column = metadata_dict[radar_utils.FIELD_NAME_COLUMN]
    storm_id_column_orig = metadata_dict[
        myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG]
    num_values = len(
        netcdf_dataset.variables[myrorss_and_mrms_io.GRID_ROW_COLUMN_ORIG])

    # Empty file: build an empty sparse grid so downstream code still works.
    if num_values == 0:
        sparse_grid_dict = {
            myrorss_and_mrms_io.GRID_ROW_COLUMN: numpy.array([], dtype=int),
            myrorss_and_mrms_io.GRID_COLUMN_COLUMN:
                numpy.array([], dtype=int),
            myrorss_and_mrms_io.NUM_GRID_CELL_COLUMN:
                numpy.array([], dtype=int),
            storm_id_column: numpy.array([], dtype=int)
        }
    else:
        sparse_grid_dict = {
            myrorss_and_mrms_io.GRID_ROW_COLUMN:
                netcdf_dataset.variables[
                    myrorss_and_mrms_io.GRID_ROW_COLUMN_ORIG][:],
            myrorss_and_mrms_io.GRID_COLUMN_COLUMN:
                netcdf_dataset.variables[
                    myrorss_and_mrms_io.GRID_COLUMN_COLUMN_ORIG][:],
            myrorss_and_mrms_io.NUM_GRID_CELL_COLUMN:
                netcdf_dataset.variables[
                    myrorss_and_mrms_io.NUM_GRID_CELL_COLUMN_ORIG][:],
            storm_id_column:
                netcdf_dataset.variables[storm_id_column_orig][:]
        }

    netcdf_dataset.close()
    sparse_grid_table = pandas.DataFrame.from_dict(sparse_grid_dict)

    # Expand the sparse grid to a full 2-D matrix of numeric storm IDs, then
    # convert each distinct ID to a list of grid-point coordinates.
    numeric_id_matrix = radar_s2f.sparse_to_full_grid(
        sparse_grid_table, metadata_dict)[0]
    polygon_table = _id_matrix_to_coord_lists(numeric_id_matrix)
    num_storms = len(polygon_table.index)

    valid_times_unix_sec = numpy.full(
        num_storms, metadata_dict[radar_utils.UNIX_TIME_COLUMN], dtype=int)

    # BUG FIX: derive the SPC-date string from metadata_dict rather than
    # valid_times_unix_sec[0], which raised IndexError when the file contained
    # zero storms.  All entries of valid_times_unix_sec are this same value.
    spc_date_strings = num_storms * [
        time_conversion.time_to_spc_date_string(
            metadata_dict[radar_utils.UNIX_TIME_COLUMN])
    ]

    tracking_start_times_unix_sec = numpy.full(
        num_storms, tracking_start_time_unix_sec, dtype=int)
    tracking_end_times_unix_sec = numpy.full(
        num_storms, tracking_end_time_unix_sec, dtype=int)

    # Placeholder columns, filled per storm in the loop below.
    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
    nested_array = polygon_table[[
        tracking_utils.PRIMARY_ID_COLUMN, tracking_utils.PRIMARY_ID_COLUMN
    ]].values.tolist()

    argument_dict = {
        tracking_utils.VALID_TIME_COLUMN: valid_times_unix_sec,
        tracking_utils.SPC_DATE_COLUMN: spc_date_strings,
        tracking_utils.TRACKING_START_TIME_COLUMN:
            tracking_start_times_unix_sec,
        tracking_utils.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec,
        tracking_utils.CENTROID_LATITUDE_COLUMN: simple_array,
        tracking_utils.CENTROID_LONGITUDE_COLUMN: simple_array,
        tracking_utils.LATITUDES_IN_STORM_COLUMN: nested_array,
        tracking_utils.LONGITUDES_IN_STORM_COLUMN: nested_array,
        tracking_utils.LATLNG_POLYGON_COLUMN: object_array,
        tracking_utils.ROWCOL_POLYGON_COLUMN: object_array
    }
    polygon_table = polygon_table.assign(**argument_dict)

    for i in range(num_storms):
        these_vertex_rows, these_vertex_columns = (
            polygons.grid_points_in_poly_to_vertices(
                grid_point_row_indices=polygon_table[
                    tracking_utils.ROWS_IN_STORM_COLUMN].values[i],
                grid_point_column_indices=polygon_table[
                    tracking_utils.COLUMNS_IN_STORM_COLUMN].values[i]))

        (polygon_table[tracking_utils.ROWS_IN_STORM_COLUMN].values[i],
         polygon_table[tracking_utils.COLUMNS_IN_STORM_COLUMN].values[i]
        ) = polygons.simple_polygon_to_grid_points(
            vertex_row_indices=these_vertex_rows,
            vertex_column_indices=these_vertex_columns)

        (polygon_table[tracking_utils.LATITUDES_IN_STORM_COLUMN].values[i],
         polygon_table[tracking_utils.LONGITUDES_IN_STORM_COLUMN].values[i]
        ) = radar_utils.rowcol_to_latlng(
            grid_rows=polygon_table[
                tracking_utils.ROWS_IN_STORM_COLUMN].values[i],
            grid_columns=polygon_table[
                tracking_utils.COLUMNS_IN_STORM_COLUMN].values[i],
            nw_grid_point_lat_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LAT_COLUMN],
            nw_grid_point_lng_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN])

        these_vertex_lat_deg, these_vertex_lng_deg = (
            radar_utils.rowcol_to_latlng(
                grid_rows=these_vertex_rows,
                grid_columns=these_vertex_columns,
                nw_grid_point_lat_deg=metadata_dict[
                    radar_utils.NW_GRID_POINT_LAT_COLUMN],
                nw_grid_point_lng_deg=metadata_dict[
                    radar_utils.NW_GRID_POINT_LNG_COLUMN],
                lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
                lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN]))

        (polygon_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values[i],
         polygon_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values[i]
        ) = geodetic_utils.get_latlng_centroid(
            latitudes_deg=these_vertex_lat_deg,
            longitudes_deg=these_vertex_lng_deg)

        polygon_table[tracking_utils.ROWCOL_POLYGON_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_columns,
                exterior_y_coords=these_vertex_rows))
        polygon_table[tracking_utils.LATLNG_POLYGON_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_lng_deg,
                exterior_y_coords=these_vertex_lat_deg))

    # Make IDs unique across SPC dates by appending the date string.
    primary_id_strings = _append_spc_date_to_storm_ids(
        primary_id_strings=polygon_table[
            tracking_utils.PRIMARY_ID_COLUMN].values,
        spc_date_string=spc_date_string)

    return polygon_table.assign(
        **{tracking_utils.PRIMARY_ID_COLUMN: primary_id_strings})