def test_find_successors_first_5sec_all_0mergers(self):
    """Ensures correct output from find_successors.

    In this case, working on the first table with max time difference = 5
    seconds and max mergers = 0.  Looking for all successors on path to
    earliest.
    """

    this_num_storm_objects = len(FIRST_STORM_OBJECT_TABLE.index)

    for i in range(this_num_storm_objects):
        # Local name fixed ("sucessor" -> "successor") for consistency with
        # the sibling find_successors tests.
        these_successor_rows = temporal_tracking.find_successors(
            storm_object_table=FIRST_STORM_OBJECT_TABLE, target_row=i,
            num_seconds_forward=5, max_num_sec_id_changes=0,
            change_type_string=temporal_tracking.MERGER_STRING,
            return_all_on_path=True)

        # Sort both actual and expected rows so the comparison is
        # order-independent.
        these_successor_rows = numpy.sort(these_successor_rows)
        these_expected_rows = numpy.sort(
            numpy.array(FIRST_SUCCESSOR_DICT_5SEC_ALL_0MERGERS[i], dtype=int)
        )

        self.assertTrue(
            numpy.array_equal(these_successor_rows, these_expected_rows)
        )
def test_find_successors_first_5sec(self):
    """Ensures correct output from find_successors.

    In this case, working on the first table with max time difference = 5
    seconds.
    """

    for this_target_row in range(len(FIRST_STORM_OBJECT_TABLE.index)):
        # Sort actual and expected rows so ordering does not matter.
        these_actual_rows = numpy.sort(
            temporal_tracking.find_successors(
                storm_object_table=FIRST_STORM_OBJECT_TABLE,
                target_row=this_target_row, num_seconds_forward=5)
        )

        these_expected_rows = numpy.sort(numpy.array(
            FIRST_SUCCESSOR_DICT_5SEC[this_target_row], dtype=int
        ))

        self.assertTrue(numpy.array_equal(
            these_actual_rows, these_expected_rows
        ))
def test_find_successors_first_1sec_all(self):
    """Ensures correct output from find_successors.

    In this case, working on the first table with max time difference = 1
    seconds and returning all successors on path to earliest.
    """

    for this_target_row in range(len(FIRST_STORM_OBJECT_TABLE.index)):
        # Sort actual and expected rows so ordering does not matter.
        these_actual_rows = numpy.sort(
            temporal_tracking.find_successors(
                storm_object_table=FIRST_STORM_OBJECT_TABLE,
                target_row=this_target_row, num_seconds_forward=1,
                return_all_on_path=True)
        )

        these_expected_rows = numpy.sort(numpy.array(
            FIRST_SUCCESSOR_DICT_1SEC_ALL[this_target_row], dtype=int
        ))

        self.assertTrue(numpy.array_equal(
            these_actual_rows, these_expected_rows
        ))
def _run(storm_metafile_name, top_tracking_dir_name, lead_time_seconds,
         output_file_name):
    """Plots spatial distribution of examples (storm objects) in file.

    This is effectively the main method.

    :param storm_metafile_name: See documentation at top of file.
    :param top_tracking_dir_name: Same.
    :param lead_time_seconds: Same.
    :param output_file_name: Same.
    """

    file_system_utils.mkdir_recursive_if_necessary(file_name=output_file_name)

    # Read storm metadata.
    print(
        'Reading storm metadata from: "{0:s}"...'.format(storm_metafile_name))

    orig_full_id_strings, orig_times_unix_sec = (
        tracking_io.read_ids_and_times(storm_metafile_name))

    orig_primary_id_strings = temporal_tracking.full_to_partial_ids(
        orig_full_id_strings)[0]

    # Find relevant tracking files.  An SPC date is needed both for each
    # storm object's own time and for its time + lead time, since successors
    # may fall on the next SPC date; `set` removes duplicate dates.
    spc_date_strings = [
        time_conversion.time_to_spc_date_string(t) for t in orig_times_unix_sec
    ]
    spc_date_strings += [
        time_conversion.time_to_spc_date_string(t + lead_time_seconds)
        for t in orig_times_unix_sec
    ]
    spc_date_strings = list(set(spc_date_strings))

    tracking_file_names = []

    for this_spc_date_string in spc_date_strings:
        tracking_file_names += tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False
        )[0]

    file_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in tracking_file_names],
        dtype=int)

    num_orig_storm_objects = len(orig_full_id_strings)
    num_files = len(file_times_unix_sec)
    keep_file_flags = numpy.full(num_files, 0, dtype=bool)

    # Keep a file only if its valid time falls in the window
    # [t, t + lead_time_seconds] for at least one original storm object.
    for i in range(num_orig_storm_objects):
        these_flags = numpy.logical_and(
            file_times_unix_sec >= orig_times_unix_sec[i],
            file_times_unix_sec <= orig_times_unix_sec[i] + lead_time_seconds)
        keep_file_flags = numpy.logical_or(keep_file_flags, these_flags)

    # Free the time array before reading (potentially many) tracking files.
    del file_times_unix_sec
    keep_file_indices = numpy.where(keep_file_flags)[0]
    tracking_file_names = [
        tracking_file_names[k] for k in keep_file_indices]

    # Read relevant tracking files.
    num_files = len(tracking_file_names)
    storm_object_tables = [None] * num_files
    print(SEPARATOR_STRING)

    for i in range(num_files):
        print('Reading data from: "{0:s}"...'.format(tracking_file_names[i]))
        this_table = tracking_io.read_file(tracking_file_names[i])

        # Keep only storm objects whose primary ID matches one of the
        # original storm objects.
        storm_object_tables[i] = this_table.loc[this_table[
            tracking_utils.PRIMARY_ID_COLUMN].isin(
                numpy.array(orig_primary_id_strings))]

        if i == 0:
            continue

        # Align columns with the first table so that pandas.concat below
        # stacks rows consistently.
        storm_object_tables[i] = storm_object_tables[i].align(
            storm_object_tables[0], axis=1)[0]

    storm_object_table = pandas.concat(
        storm_object_tables, axis=0, ignore_index=True)
    print(SEPARATOR_STRING)

    # Find relevant storm objects.
    orig_object_rows = tracking_utils.find_storm_objects(
        all_id_strings=storm_object_table[
            tracking_utils.FULL_ID_COLUMN].values.tolist(),
        all_times_unix_sec=storm_object_table[
            tracking_utils.VALID_TIME_COLUMN].values,
        id_strings_to_keep=orig_full_id_strings,
        times_to_keep_unix_sec=orig_times_unix_sec)

    good_object_rows = numpy.array([], dtype=int)

    for i in range(num_orig_storm_objects):
        # Non-merging successors only!  The two queries differ in
        # `max_num_sec_id_changes` / `change_type_string`; their intersection
        # presumably keeps successors reached without any merger — confirm
        # against temporal_tracking.find_successors docs.
        first_rows = temporal_tracking.find_successors(
            storm_object_table=storm_object_table,
            target_row=orig_object_rows[i],
            num_seconds_forward=lead_time_seconds, max_num_sec_id_changes=1,
            change_type_string=temporal_tracking.SPLIT_STRING,
            return_all_on_path=True)

        second_rows = temporal_tracking.find_successors(
            storm_object_table=storm_object_table,
            target_row=orig_object_rows[i],
            num_seconds_forward=lead_time_seconds, max_num_sec_id_changes=0,
            change_type_string=temporal_tracking.MERGER_STRING,
            return_all_on_path=True)

        first_rows = first_rows.tolist()
        second_rows = second_rows.tolist()
        # Intersection of the two successor sets.
        these_rows = set(first_rows) & set(second_rows)
        these_rows = numpy.array(list(these_rows), dtype=int)

        good_object_rows = numpy.concatenate((good_object_rows, these_rows))

    good_object_rows = numpy.unique(good_object_rows)
    storm_object_table = storm_object_table.iloc[good_object_rows]

    # Replace absolute valid times with time of day (seconds into the day),
    # which is what the colour scheme below encodes.
    times_of_day_sec = numpy.mod(
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values,
        NUM_SECONDS_IN_DAY)

    storm_object_table = storm_object_table.assign(
        **{tracking_utils.VALID_TIME_COLUMN: times_of_day_sec})

    # Map limits = centroid bounding box padded by LATLNG_BUFFER_DEG.
    min_plot_latitude_deg = -LATLNG_BUFFER_DEG + numpy.min(
        storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values)
    max_plot_latitude_deg = LATLNG_BUFFER_DEG + numpy.max(
        storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values)
    min_plot_longitude_deg = -LATLNG_BUFFER_DEG + numpy.min(
        storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values)
    max_plot_longitude_deg = LATLNG_BUFFER_DEG + numpy.max(
        storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values)

    _, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg, resolution_string='i')
    )

    # Coastlines are drawn at double width; other borders at BORDER_WIDTH.
    plotting_utils.plot_coastlines(
        basemap_object=basemap_object, axes_object=axes_object,
        line_colour=BORDER_COLOUR, line_width=BORDER_WIDTH * 2)
    plotting_utils.plot_countries(
        basemap_object=basemap_object, axes_object=axes_object,
        line_colour=BORDER_COLOUR, line_width=BORDER_WIDTH)
    plotting_utils.plot_states_and_provinces(
        basemap_object=basemap_object, axes_object=axes_object,
        line_colour=BORDER_COLOUR, line_width=BORDER_WIDTH)
    plotting_utils.plot_parallels(
        basemap_object=basemap_object, axes_object=axes_object,
        num_parallels=NUM_PARALLELS, line_width=BORDER_WIDTH)
    plotting_utils.plot_meridians(
        basemap_object=basemap_object, axes_object=axes_object,
        num_meridians=NUM_MERIDIANS, line_width=BORDER_WIDTH)

    # NOTE(review): alternate mode (full tracks instead of centroids) kept
    # for reference.
    # colour_bar_object = storm_plotting.plot_storm_tracks(
    #     storm_object_table=storm_object_table, axes_object=axes_object,
    #     basemap_object=basemap_object, colour_map_object=COLOUR_MAP_OBJECT,
    #     colour_min_unix_sec=0, colour_max_unix_sec=NUM_SECONDS_IN_DAY - 1,
    #     line_width=TRACK_LINE_WIDTH,
    #     start_marker_type=None, end_marker_type=None
    # )

    # Colour axis spans one full day, matching the time-of-day values
    # computed above.
    colour_bar_object = storm_plotting.plot_storm_centroids(
        storm_object_table=storm_object_table, axes_object=axes_object,
        basemap_object=basemap_object, colour_map_object=COLOUR_MAP_OBJECT,
        colour_min_unix_sec=0, colour_max_unix_sec=NUM_SECONDS_IN_DAY - 1)

    # Colour-bar ticks: hourly marks, drop the duplicate end point, then keep
    # every second hour.
    tick_times_unix_sec = numpy.linspace(
        0, NUM_SECONDS_IN_DAY, num=NUM_HOURS_IN_DAY + 1, dtype=int)
    tick_times_unix_sec = tick_times_unix_sec[:-1]
    tick_times_unix_sec = tick_times_unix_sec[::2]

    tick_time_strings = [
        time_conversion.unix_sec_to_string(t, COLOUR_BAR_TIME_FORMAT)
        for t in tick_times_unix_sec
    ]

    colour_bar_object.set_ticks(tick_times_unix_sec)
    colour_bar_object.set_ticklabels(tick_time_strings)

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    pyplot.savefig(
        output_file_name, dpi=FIGURE_RESOLUTION_DPI, pad_inches=0,
        bbox_inches='tight')
    pyplot.close()