def test_find_predecessors_first_5sec_all_0changes(self):
    """Ensures correct output from find_predecessors.

    In this case, working on the first table with max time difference =
    5 seconds and max ID changes = 0.  Looking for all predecessors on path to
    earliest.
    """

    this_num_storm_objects = len(FIRST_STORM_OBJECT_TABLE.index)

    for i in range(this_num_storm_objects):
        these_predecessor_rows = temporal_tracking.find_predecessors(
            storm_object_table=FIRST_STORM_OBJECT_TABLE, target_row=i,
            num_seconds_back=5, max_num_sec_id_changes=0,
            return_all_on_path=True)

        these_predecessor_rows = numpy.sort(these_predecessor_rows)
        these_expected_rows = numpy.sort(numpy.array(
            FIRST_PREDECESSOR_DICT_5SEC_ALL_0CHANGES[i], dtype=int
        ))

        self.assertTrue(numpy.array_equal(
            these_predecessor_rows, these_expected_rows
        ))
def test_find_predecessors_first_5sec(self):
    """Ensures correct output from find_predecessors.

    In this case, working on the first table with max time difference =
    5 seconds.
    """

    this_num_storm_objects = len(FIRST_STORM_OBJECT_TABLE.index)

    for i in range(this_num_storm_objects):
        these_predecessor_rows = temporal_tracking.find_predecessors(
            storm_object_table=FIRST_STORM_OBJECT_TABLE, target_row=i,
            num_seconds_back=5)

        these_predecessor_rows = numpy.sort(these_predecessor_rows)
        these_expected_rows = numpy.sort(numpy.array(
            FIRST_PREDECESSOR_DICT_5SEC[i], dtype=int
        ))

        self.assertTrue(numpy.array_equal(
            these_predecessor_rows, these_expected_rows
        ))
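
# Minimal usage sketch (not one of the unit tests above), contrasting the two
# calling patterns exercised by these tests.  The interpretation is inferred
# from the test docstrings: the default call appears to return only the
# earliest predecessors within `num_seconds_back`, whereas
# `return_all_on_path=True` returns every storm object on the path back to the
# earliest predecessor, and `max_num_sec_id_changes=0` restricts the search to
# objects whose secondary ID never changes.  The target row and the helper name
# `_example_find_predecessors` are chosen purely for illustration.

def _example_find_predecessors():
    """Illustrates the two calling patterns used in the tests above."""

    example_target_row = len(FIRST_STORM_OBJECT_TABLE.index) - 1

    earliest_rows = temporal_tracking.find_predecessors(
        storm_object_table=FIRST_STORM_OBJECT_TABLE,
        target_row=example_target_row, num_seconds_back=5)

    all_on_path_rows = temporal_tracking.find_predecessors(
        storm_object_table=FIRST_STORM_OBJECT_TABLE,
        target_row=example_target_row, num_seconds_back=5,
        max_num_sec_id_changes=0, return_all_on_path=True)

    print('Earliest predecessors only: {0:s}'.format(
        str(numpy.sort(earliest_rows))
    ))
    print('All predecessors on path, no secondary-ID changes: {0:s}'.format(
        str(numpy.sort(all_on_path_rows))
    ))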
def _plot_schema(storm_object_table, output_file_name):
    """Plots schema for storm-velocity estimation.

    :param storm_object_table: pandas DataFrame created by
        `_create_tracking_data`.
    :param output_file_name: Path to output file (figure will be saved here).
    """

    centroid_x_coords = storm_object_table[
        tracking_utils.CENTROID_X_COLUMN].values
    centroid_y_coords = storm_object_table[
        tracking_utils.CENTROID_Y_COLUMN].values
    secondary_id_strings = storm_object_table[
        tracking_utils.SECONDARY_ID_COLUMN].values

    storm_object_table = storm_object_table.assign(**{
        tracking_utils.CENTROID_LONGITUDE_COLUMN: centroid_x_coords,
        tracking_utils.CENTROID_LATITUDE_COLUMN: centroid_y_coords
    })

    figure_object, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=numpy.min(centroid_y_coords),
            max_latitude_deg=numpy.max(centroid_y_coords),
            min_longitude_deg=numpy.min(centroid_x_coords),
            max_longitude_deg=numpy.max(centroid_x_coords)
        )
    )

    storm_plotting.plot_storm_tracks(
        storm_object_table=storm_object_table, axes_object=axes_object,
        basemap_object=basemap_object, colour_map_object=None,
        constant_colour=TRACK_COLOUR, line_width=TRACK_WIDTH,
        start_marker_type=None, end_marker_type=None)

    num_storm_objects = len(storm_object_table.index)

    predecessor_rows = temporal_tracking.find_predecessors(
        storm_object_table=storm_object_table,
        target_row=num_storm_objects - 1, num_seconds_back=100,
        return_all_on_path=False)

    legend_handles = [None] * 3
    legend_strings = [None] * 3

    this_handle = axes_object.plot(
        centroid_x_coords[[0, 0]], centroid_y_coords[[0, 0]],
        color=TRACK_COLOUR, linestyle='solid', linewidth=TRACK_WIDTH
    )[0]

    legend_handles[-1] = this_handle
    legend_strings[-1] = 'Storm track'

    for i in range(num_storm_objects):
        if i in predecessor_rows or i == num_storm_objects - 1:
            this_colour = SPECIAL_STORM_COLOUR
        else:
            this_colour = DEFAULT_STORM_COLOUR

        this_handle = axes_object.plot(
            centroid_x_coords[i], centroid_y_coords[i], linestyle='None',
            marker=MARKER_TYPE, markersize=MARKER_SIZE,
            markerfacecolor=this_colour, markeredgecolor=this_colour,
            markeredgewidth=MARKER_EDGE_WIDTH
        )[0]

        if i in predecessor_rows or i == num_storm_objects - 1:
            legend_handles[0] = this_handle
            legend_strings[0] = 'Object used in\nvelocity estimate'
        else:
            legend_handles[1] = this_handle
            legend_strings[1] = 'Object not used'

        axes_object.text(
            centroid_x_coords[i], centroid_y_coords[i] - TEXT_OFFSET,
            secondary_id_strings[i], color=this_colour, fontsize=FONT_SIZE,
            fontweight='bold', horizontalalignment='center',
            verticalalignment='top')

    axes_object.set_yticks([], [])

    storm_times_minutes = storm_object_table[
        tracking_utils.VALID_TIME_COLUMN].values

    x_tick_values, unique_indices = numpy.unique(
        centroid_x_coords, return_index=True)
    x_tick_labels = [
        '{0:d}'.format(storm_times_minutes[i]) for i in unique_indices
    ]

    axes_object.set_xticks(x_tick_values)
    axes_object.set_xticklabels(x_tick_labels)
    axes_object.set_xlabel('Time (minutes)')

    axes_object.legend(
        legend_handles, legend_strings, fontsize=FONT_SIZE, loc=(0.02, 0.55))

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    figure_object.savefig(
        output_file_name, dpi=FIGURE_RESOLUTION_DPI, pad_inches=0,
        bbox_inches='tight')
    pyplot.close(figure_object)
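
# The schema above highlights which storm objects feed the velocity estimate
# for the last object.  The sketch below shows one way such an estimate could
# be computed from the same predecessor search: it illustrates the idea
# pictured in the figure, not the library's actual velocity code.  The helper
# name and the simple mean-displacement formula are assumptions made for this
# example; column names are the same ones used above, and velocities are per
# unit of whatever the valid-time column stores (minutes in this schema).

def _example_velocity_estimate(storm_object_table, num_seconds_back=100):
    """Estimates velocity of the last storm object from its predecessors.

    :param storm_object_table: See doc for `_plot_schema`.
    :param num_seconds_back: Max time difference for predecessor search.
    :return: x_velocity: Estimated x-velocity (centroid units per time unit).
    :return: y_velocity: Same but for y-velocity.
    """

    target_row = len(storm_object_table.index) - 1

    # Earliest predecessors only, as in the schema above.  These are assumed
    # to occur strictly before the target object.
    predecessor_rows = temporal_tracking.find_predecessors(
        storm_object_table=storm_object_table, target_row=target_row,
        num_seconds_back=num_seconds_back, return_all_on_path=False)

    x_coords = storm_object_table[tracking_utils.CENTROID_X_COLUMN].values
    y_coords = storm_object_table[tracking_utils.CENTROID_Y_COLUMN].values
    valid_times = storm_object_table[tracking_utils.VALID_TIME_COLUMN].values

    time_diffs = valid_times[target_row] - valid_times[predecessor_rows]
    x_velocities = (
        (x_coords[target_row] - x_coords[predecessor_rows]) / time_diffs
    )
    y_velocities = (
        (y_coords[target_row] - y_coords[predecessor_rows]) / time_diffs
    )

    # Average displacement per unit time over all predecessors used.
    return numpy.mean(x_velocities), numpy.mean(y_velocities)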
def _handle_one_storm_cell(
        storm_object_table, primary_id_string, conus_latitudes_deg,
        conus_longitudes_deg, max_lead_time_sec):
    """Handles (either keeps or removes) one storm cell.

    In this case, a "storm cell" is a group of storm objects with the same
    primary ID.

    V = number of vertices in CONUS boundary

    :param storm_object_table: pandas DataFrame with columns listed in doc for
        `storm_tracking_io.write_file`.
    :param primary_id_string: Primary ID of storm cell.
    :param conus_latitudes_deg: length-V numpy array of latitudes (deg N) in
        boundary.
    :param conus_longitudes_deg: length-V numpy array of longitudes (deg E) in
        boundary.
    :param max_lead_time_sec: See documentation at top of file.
    :return: bad_object_indices: 1-D numpy array with indices of bad storm
        objects (those with successor outside CONUS).  These are row indices
        for `storm_object_table`.
    """

    object_in_cell_indices = numpy.where(
        storm_object_table[tracking_utils.PRIMARY_ID_COLUMN].values ==
        primary_id_string
    )[0]

    query_latitudes_deg = []
    query_longitudes_deg = []
    query_object_indices = []

    num_storm_objects = len(object_in_cell_indices)

    for i in range(num_storm_objects):
        j = object_in_cell_indices[i]

        this_polygon_object = (
            storm_object_table[tracking_utils.LATLNG_POLYGON_COLUMN].values[j]
        )
        these_latitudes_deg = numpy.array(this_polygon_object.exterior.xy[1])
        these_longitudes_deg = numpy.array(this_polygon_object.exterior.xy[0])

        # Find northeasternmost point in storm boundary.
        this_index = numpy.argmax(these_latitudes_deg + these_longitudes_deg)
        query_latitudes_deg.append(these_latitudes_deg[this_index])
        query_longitudes_deg.append(these_longitudes_deg[this_index])
        query_object_indices.append(j)

        # Find southwesternmost point in storm boundary.
        this_index = numpy.argmin(these_latitudes_deg + these_longitudes_deg)
        query_latitudes_deg.append(these_latitudes_deg[this_index])
        query_longitudes_deg.append(these_longitudes_deg[this_index])
        query_object_indices.append(j)

        # Find northwesternmost point in storm boundary.
        this_index = numpy.argmax(these_latitudes_deg - these_longitudes_deg)
        query_latitudes_deg.append(these_latitudes_deg[this_index])
        query_longitudes_deg.append(these_longitudes_deg[this_index])
        query_object_indices.append(j)

        # Find southeasternmost point in storm boundary.
        this_index = numpy.argmax(these_longitudes_deg - these_latitudes_deg)
        query_latitudes_deg.append(these_latitudes_deg[this_index])
        query_longitudes_deg.append(these_longitudes_deg[this_index])
        query_object_indices.append(j)

    query_latitudes_deg = numpy.array(query_latitudes_deg)
    query_longitudes_deg = numpy.array(query_longitudes_deg)
    query_object_indices = numpy.array(query_object_indices, dtype=int)

    in_conus_flags = conus_boundary.find_points_in_conus(
        conus_latitudes_deg=conus_latitudes_deg,
        conus_longitudes_deg=conus_longitudes_deg,
        query_latitudes_deg=query_latitudes_deg,
        query_longitudes_deg=query_longitudes_deg,
        use_shortcuts=True, verbose=False)

    if numpy.all(in_conus_flags):
        return numpy.array([], dtype=int)

    first_bad_index = numpy.where(numpy.invert(in_conus_flags))[0][0]
    first_bad_longitude_deg = -1 * lng_conversion.convert_lng_negative_in_west(
        query_longitudes_deg[first_bad_index]
    )

    print('Point ({0:.1f} deg N, {1:.1f} deg W) is not in CONUS!'.format(
        query_latitudes_deg[first_bad_index], first_bad_longitude_deg
    ))

    object_not_in_conus_indices = numpy.unique(
        query_object_indices[numpy.invert(in_conus_flags)]
    )
    bad_object_indices = numpy.array([], dtype=int)

    for i in object_not_in_conus_indices:
        these_indices = temporal_tracking.find_predecessors(
            storm_object_table=storm_object_table, target_row=i,
            num_seconds_back=max_lead_time_sec, max_num_sec_id_changes=1,
            return_all_on_path=True)

        bad_object_indices = numpy.concatenate((
            bad_object_indices, these_indices
        ))

    return numpy.unique(bad_object_indices)
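
# Illustrative sketch (not the script's actual driver code) of how the
# per-cell results might be combined: loop over unique primary IDs, gather bad
# row indices from `_handle_one_storm_cell`, then drop those rows by position.
# The wrapper name is hypothetical; its arguments mirror the function above.

def _example_remove_storms_exiting_conus(
        storm_object_table, conus_latitudes_deg, conus_longitudes_deg,
        max_lead_time_sec):
    """Removes storm objects whose cell leaves the CONUS boundary.

    :param storm_object_table: See doc for `_handle_one_storm_cell`.
    :param conus_latitudes_deg: Same.
    :param conus_longitudes_deg: Same.
    :param max_lead_time_sec: Same.
    :return: storm_object_table: Same as input but maybe with fewer rows.
    """

    unique_primary_id_strings = numpy.unique(
        storm_object_table[tracking_utils.PRIMARY_ID_COLUMN].values
    )
    bad_object_indices = numpy.array([], dtype=int)

    for this_id_string in unique_primary_id_strings:
        these_indices = _handle_one_storm_cell(
            storm_object_table=storm_object_table,
            primary_id_string=this_id_string,
            conus_latitudes_deg=conus_latitudes_deg,
            conus_longitudes_deg=conus_longitudes_deg,
            max_lead_time_sec=max_lead_time_sec)

        bad_object_indices = numpy.concatenate(
            (bad_object_indices, these_indices))

    # Drop bad rows by position, so this works regardless of the DataFrame's
    # index labels.
    good_flags = numpy.full(len(storm_object_table.index), True, dtype=bool)
    good_flags[numpy.unique(bad_object_indices)] = False

    return storm_object_table.iloc[numpy.where(good_flags)[0]]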