def test_interp_in_time_next(self):
    """Verifies interp_in_time with the next-neighbour method."""

    query_matrix = interp.interp_in_time(
        INPUT_MATRIX_FOR_TEMPORAL_INTERP,
        sorted_input_times_unix_sec=INPUT_TIMES_UNIX_SEC,
        query_times_unix_sec=NEXT_INTERP_TIMES_UNIX_SEC,
        method_string=interp.NEXT_INTERP_METHOD)

    # Compare against the pre-computed expected matrix within tolerance.
    matrices_match = numpy.allclose(
        query_matrix, EXPECTED_MATRIX_FOR_NEXT_INTERP, atol=TOLERANCE)
    self.assertTrue(matrices_match)
def test_interp_in_time_linear_extrap(self):
    """Verifies interp_in_time with linear method and extrapolation on."""

    query_matrix = interp.interp_in_time(
        INPUT_MATRIX_FOR_TEMPORAL_INTERP,
        sorted_input_times_unix_sec=INPUT_TIMES_UNIX_SEC,
        query_times_unix_sec=LINEAR_EXTRAP_TIMES_UNIX_SEC,
        method_string=interp.LINEAR_INTERP_METHOD,
        allow_extrap=True)

    # Compare against the pre-computed expected matrix within tolerance.
    matrices_match = numpy.allclose(
        query_matrix, EXPECTED_MATRIX_FOR_LINEAR_EXTRAP, atol=TOLERANCE)
    self.assertTrue(matrices_match)
def test_interp_in_time_next(self):
    """Verifies interp_in_time with next-neighbour interpolation."""

    interp_matrix = interp.interp_in_time(
        input_matrix=INPUT_MATRIX_FOR_TEMPORAL_INTERP,
        sorted_input_times_unix_sec=INPUT_TIMES_UNIX_SEC,
        query_times_unix_sec=QUERY_TIMES_FOR_NEXT_NEIGH_UNIX_SEC,
        method_string=interp.NEXT_NEIGHBOUR_METHOD_STRING,
        extrapolate=False)

    # Compare against the pre-computed expected matrix within tolerance.
    matrices_match = numpy.allclose(
        interp_matrix, INTERP_MATRIX_NEXT_TIME, atol=TOLERANCE)
    self.assertTrue(matrices_match)
def test_interp_in_time_extrap(self):
    """Verifies interp_in_time when all query times require extrapolation."""

    interp_matrix = interp.interp_in_time(
        input_matrix=INPUT_MATRIX_FOR_TEMPORAL_INTERP,
        sorted_input_times_unix_sec=INPUT_TIMES_UNIX_SEC,
        query_times_unix_sec=QUERY_TIMES_FOR_EXTRAP_UNIX_SEC,
        method_string=interp.LINEAR_METHOD_STRING,
        extrapolate=True)

    # Compare against the pre-computed expected matrix within tolerance.
    matrices_match = numpy.allclose(
        interp_matrix, TIME_EXTRAP_MATRIX, atol=TOLERANCE)
    self.assertTrue(matrices_match)
def _interp_one_storm_in_time(storm_object_table_1cell, storm_id=None,
                              query_time_unix_sec=None):
    """Interpolates location of one storm cell in time.

    The storm object nearest in time to the query time is shifted rigidly --
    every vertex moves by the same x- and y-offset -- so that the
    interpolated storm object keeps a realistic shape.

    N = number of storm objects (snapshots of storm cell)
    V = number of vertices in a given storm object

    :param storm_object_table_1cell: N-row pandas DataFrame with the
        following columns.
    storm_object_table_1cell.unix_time_sec: Time of snapshot.
    storm_object_table_1cell.centroid_x_metres: x-coordinate of centroid.
    storm_object_table_1cell.centroid_y_metres: y-coordinate of centroid.
    storm_object_table_1cell.vertices_x_metres: length-V numpy array with x-
        coordinates of vertices.
    storm_object_table_1cell.vertices_y_metres: length-V numpy array with y-
        coordinates of vertices.
    :param storm_id: String ID for storm cell.
    :param query_time_unix_sec: Storm location will be interpolated to this
        time.
    :return: interp_vertex_table_1object: pandas DataFrame with the following
        columns (each row is one vertex of the interpolated storm object).
    interp_vertex_table_1object.storm_id: String ID for storm cell.
    interp_vertex_table_1object.vertex_x_metres: x-coordinate of vertex.
    interp_vertex_table_1object.vertex_y_metres: y-coordinate of vertex.
    """

    # Hoist the snapshot times once; they are reused in three places below.
    times_unix_sec = storm_object_table_1cell[tracking_io.TIME_COLUMN].values
    sort_indices = numpy.argsort(times_unix_sec)

    # Row 0 = x-centroids, row 1 = y-centroids, in chronological order.
    centroid_matrix = numpy.vstack((
        storm_object_table_1cell[CENTROID_X_COLUMN].values[sort_indices],
        storm_object_table_1cell[CENTROID_Y_COLUMN].values[sort_indices]))

    interp_centroid_vector = interp.interp_in_time(
        centroid_matrix,
        sorted_input_times_unix_sec=times_unix_sec[sort_indices],
        query_times_unix_sec=numpy.array([query_time_unix_sec]),
        allow_extrap=True)

    # Nearest snapshot supplies the polygon shape; note this index is into
    # the ORIGINAL (unsorted) row order, matching the .values lookups below.
    nearest_time_index = numpy.argmin(
        numpy.absolute(times_unix_sec - query_time_unix_sec))

    # Rigid shift from the nearest snapshot's centroid to the interpolated one.
    x_offset_metres = (
        interp_centroid_vector[0] -
        storm_object_table_1cell[CENTROID_X_COLUMN].values[nearest_time_index])
    y_offset_metres = (
        interp_centroid_vector[1] -
        storm_object_table_1cell[CENTROID_Y_COLUMN].values[nearest_time_index])

    nearest_vertices_x_metres = storm_object_table_1cell[
        VERTICES_X_COLUMN].values[nearest_time_index]
    nearest_vertices_y_metres = storm_object_table_1cell[
        VERTICES_Y_COLUMN].values[nearest_time_index]
    num_vertices = len(nearest_vertices_x_metres)

    return pandas.DataFrame.from_dict({
        tracking_io.STORM_ID_COLUMN: [storm_id] * num_vertices,
        VERTEX_X_COLUMN: nearest_vertices_x_metres + x_offset_metres,
        VERTEX_Y_COLUMN: nearest_vertices_y_metres + y_offset_metres
    })
def interp_tornadoes_along_tracks(tornado_table, interp_time_interval_sec):
    """Interpolates each tornado to many points along its track.

    :param tornado_table: See doc for `write_processed_file`.
    :param interp_time_interval_sec: Will interpolate at this time interval
        between start and end points.
    :return: tornado_segment_table: pandas DataFrame with the following
        columns, where each row is one tornado-track segment.
    tornado_segment_table.valid_time_unix_sec: Valid time.
    tornado_segment_table.latitude_deg: Latitude (deg N).
    tornado_segment_table.longitude_deg: Longitude (deg E).
    tornado_segment_table.tornado_id_string: Tornado ID.
    tornado_segment_table.fujita_rating: F-scale or EF-scale rating (integer
        from 0...5).
    """

    # TODO(thunderhoser): Return "width" column as well.

    num_tornadoes = len(tornado_table.index)

    # Accumulators for all tornadoes, concatenated in table order.
    all_times_unix_sec = numpy.array([], dtype=int)
    all_latitudes_deg = numpy.array([])
    all_longitudes_deg = numpy.array([])
    all_id_strings = []
    all_fujita_strings = []

    for j in range(num_tornadoes):
        this_start_time_unix_sec = tornado_table[START_TIME_COLUMN].values[j]
        this_end_time_unix_sec = tornado_table[END_TIME_COLUMN].values[j]

        # Number of query points needed to cover the track at the desired
        # interval (always at least one: the start point).
        this_num_query_times = 1 + int(numpy.round(
            float(this_end_time_unix_sec - this_start_time_unix_sec) /
            interp_time_interval_sec))

        these_query_times_unix_sec = numpy.round(numpy.linspace(
            this_start_time_unix_sec, this_end_time_unix_sec,
            num=this_num_query_times, dtype=float
        )).astype(int)

        these_input_latitudes_deg = numpy.array([
            tornado_table[START_LAT_COLUMN].values[j],
            tornado_table[END_LAT_COLUMN].values[j]
        ])
        these_input_longitudes_deg = numpy.array([
            tornado_table[START_LNG_COLUMN].values[j],
            tornado_table[END_LNG_COLUMN].values[j]
        ])
        these_input_times_unix_sec = numpy.array(
            [this_start_time_unix_sec, this_end_time_unix_sec], dtype=int)

        if this_num_query_times == 1:
            # Degenerate track (start time == end time): no interpolation,
            # just the start point as a 2 x 1 matrix of (lng, lat).
            this_query_coord_matrix = numpy.reshape(
                numpy.array([these_input_longitudes_deg[0],
                             these_input_latitudes_deg[0]]),
                (2, 1))
        else:
            this_query_coord_matrix = interp.interp_in_time(
                input_matrix=numpy.vstack(
                    (these_input_longitudes_deg, these_input_latitudes_deg)),
                sorted_input_times_unix_sec=these_input_times_unix_sec,
                query_times_unix_sec=these_query_times_unix_sec,
                method_string=interp.LINEAR_METHOD_STRING,
                extrapolate=False)

        all_times_unix_sec = numpy.concatenate(
            (all_times_unix_sec, these_query_times_unix_sec))
        all_latitudes_deg = numpy.concatenate(
            (all_latitudes_deg, this_query_coord_matrix[1, :]))
        all_longitudes_deg = numpy.concatenate(
            (all_longitudes_deg, this_query_coord_matrix[0, :]))

        this_id_string = create_tornado_id(
            start_time_unix_sec=these_input_times_unix_sec[0],
            start_latitude_deg=these_input_latitudes_deg[0],
            start_longitude_deg=these_input_longitudes_deg[0])

        this_num_points = len(these_query_times_unix_sec)
        all_id_strings += [this_id_string] * this_num_points
        all_fujita_strings += (
            [tornado_table[FUJITA_RATING_COLUMN].values[j]] * this_num_points)

    return pandas.DataFrame.from_dict({
        TIME_COLUMN: all_times_unix_sec,
        LATITUDE_COLUMN: all_latitudes_deg,
        LONGITUDE_COLUMN: all_longitudes_deg,
        TORNADO_ID_COLUMN: all_id_strings,
        FUJITA_RATING_COLUMN: all_fujita_strings
    })