def sia_for_closed_polygon(
        polygon_object,
        num_vertices_in_half_window=NUM_VERTICES_IN_HALF_WINDOW_DEFAULT,
        num_iterations=NUM_ITERATIONS_DEFAULT, check_input_args=True):
    """Applies the SIA smoothing algorithm to a closed polygon.

    Only the exterior ring is smoothed; interior rings (holes) are ignored.

    V = number of exterior vertices

    :param polygon_object: Instance of `shapely.geometry.Polygon`.
    :param num_vertices_in_half_window: Number of vertices in smoothing half-
        window.  The full window contains 2 * num_vertices_in_half_window + 1
        vertices.
    :param num_iterations: Number of smoothing iterations.
    :param check_input_args: Boolean flag.  If True, input arguments are
        error-checked; if False, they are not.
    :return: vertex_x_coords_smoothed: length-V numpy array with smoothed
        x-coordinates of vertices.
    :return: vertex_y_coords_smoothed: length-V numpy array with smoothed
        y-coordinates of vertices.
    """

    # The exterior ring repeats its first vertex at the end, so subtract one
    # to get the number of distinct vertices.
    num_vertices = len(polygon_object.exterior.xy[0]) - 1

    if check_input_args:
        error_checking.assert_is_geq(
            num_vertices, MIN_VERTICES_IN_POLYGON_OR_LINE)
        error_checking.assert_is_integer(num_vertices_in_half_window)
        error_checking.assert_is_geq(num_vertices_in_half_window, 1)
        error_checking.assert_is_integer(num_iterations)
        error_checking.assert_is_geq(num_iterations, 1)

    # The half-window cannot exceed the number of other vertices.
    num_vertices_in_half_window = numpy.min(
        numpy.array([num_vertices_in_half_window, num_vertices - 1]))

    # Work on a deep copy so the caller's polygon is never mutated.
    this_polygon_object = copy.deepcopy(polygon_object)

    for iteration_index in range(num_iterations):
        if iteration_index > 0:
            # Rebuild the polygon from the previous iteration's output.
            this_polygon_object = polygons.vertex_arrays_to_polygon_object(
                vertex_x_coords_smoothed, vertex_y_coords_smoothed)

        # Pad with wrap-around vertices so the smoothing window is defined at
        # the ends of the ring.
        vertex_x_coords_padded, vertex_y_coords_padded = (
            shape_utils.pad_closed_polygon(
                this_polygon_object,
                num_padding_vertices=num_vertices_in_half_window,
                check_input_args=False))

        vertex_x_coords_smoothed, vertex_y_coords_smoothed = _sia_one_iteration(
            vertex_x_coords_padded, vertex_y_coords_padded,
            num_vertices_in_half_window)

    # Close the ring by repeating the first smoothed vertex at the end.
    vertex_x_coords_smoothed = numpy.concatenate((
        vertex_x_coords_smoothed, vertex_x_coords_smoothed[[0]]))
    vertex_y_coords_smoothed = numpy.concatenate((
        vertex_y_coords_smoothed, vertex_y_coords_smoothed[[0]]))

    return vertex_x_coords_smoothed, vertex_y_coords_smoothed
# Exemple #2
def _project_polygon_latlng_to_xy(polygon_object_latlng,
                                  centroid_latitude_deg=None,
                                  centroid_longitude_deg=None):
    """Projects polygon from lat-long to x-y coordinates.

    :param polygon_object_latlng: Instance of `shapely.geometry.Polygon`, where
        x-coordinates are actually longitudes and y-coordinates are actually
        latitudes.
    :param centroid_latitude_deg: Latitude (deg N) at polygon centroid.
    :param centroid_longitude_deg: Longitude (deg E) at polygon centroid.
    :return: polygon_object_xy: Instance of `shapely.geometry.Polygon`, where x-
        and y-coordinates are in metres.
    """

    # Both standard latitudes of the Lambert conformal projection are set to
    # the centroid latitude.
    projection_object = projections.init_lambert_conformal_projection(
        standard_latitudes_deg=numpy.array(
            [centroid_latitude_deg, centroid_latitude_deg]),
        central_longitude_deg=centroid_longitude_deg)

    # exterior.xy[0] holds longitudes; exterior.xy[1] holds latitudes.
    these_exterior_coords = polygon_object_latlng.exterior.xy

    vertex_x_metres, vertex_y_metres = projections.project_latlng_to_xy(
        numpy.asarray(these_exterior_coords[1]),
        numpy.asarray(these_exterior_coords[0]),
        projection_object=projection_object,
        false_easting_metres=0.,
        false_northing_metres=0.)

    return polygons.vertex_arrays_to_polygon_object(
        vertex_x_metres, vertex_y_metres)
# Exemple #3
def _confidence_interval_to_polygon(x_values, y_values_bottom, y_values_top):
    """Turns confidence interval into polygon.

    P = number of points

    :param x_values: length-P numpy array of x-values.
    :param y_values_bottom: length-P numpy array of y-values at bottom of
        confidence interval.
    :param y_values_top: Same but top of confidence interval.
    :return: polygon_object: Instance of `shapely.geometry.Polygon`.  If the
        bottom of the interval is NaN at every point, returns None instead.
    """

    # Keep only points where the bottom of the interval is defined (non-NaN).
    real_indices = numpy.where(numpy.invert(numpy.isnan(y_values_bottom)))[0]
    if real_indices.size == 0:
        return None

    filtered_x_values = x_values[real_indices]
    filtered_y_values_bottom = y_values_bottom[real_indices]
    filtered_y_values_top = y_values_top[real_indices]

    # Trace the top curve forward, the bottom curve in reverse, then close the
    # ring by repeating the first top-curve vertex.
    polygon_x_coords = numpy.concatenate((
        filtered_x_values, filtered_x_values[::-1], filtered_x_values[[0]]))
    polygon_y_coords = numpy.concatenate((
        filtered_y_values_top, filtered_y_values_bottom[::-1],
        filtered_y_values_top[[0]]))

    return polygons.vertex_arrays_to_polygon_object(
        exterior_x_coords=polygon_x_coords,
        exterior_y_coords=polygon_y_coords)
def _grid_cell_to_polygon(grid_point_x_metres, grid_point_y_metres,
                          x_spacing_metres, y_spacing_metres):
    """Converts grid cell from center point to polygon.

    This method assumes that the grid has uniform spacing in both x- and y-
    directions.  In other words, the grid is regular in x-y (and not, for
    example, lat-long) coords.

    :param grid_point_x_metres: x-coordinate of center point.
    :param grid_point_y_metres: y-coordinate of center point.
    :param x_spacing_metres: Spacing between adjacent points along x-axis.
    :param y_spacing_metres: Spacing between adjacent points along y-axis.
    :return: polygon_object_xy_metres: `shapely.geometry.Polygon` object, where
        each vertex is a corner of the grid cell.  Coordinates are still in
        metres.
    """

    # Signed offsets from the center to each corner; the first corner is
    # repeated at the end to close the ring.
    x_offsets = numpy.array([-1, 1, 1, -1, -1], dtype=float)
    y_offsets = numpy.array([-1, -1, 1, 1, -1], dtype=float)

    vertex_x_coords_metres = (
        grid_point_x_metres + x_offsets * x_spacing_metres / 2)
    vertex_y_coords_metres = (
        grid_point_y_metres + y_offsets * y_spacing_metres / 2)

    return polygons.vertex_arrays_to_polygon_object(
        exterior_x_coords=vertex_x_coords_metres,
        exterior_y_coords=vertex_y_coords_metres)
# Exemple #5
def _confidence_interval_to_polygon(x_coords_bottom,
                                    y_coords_bottom,
                                    x_coords_top,
                                    y_coords_top,
                                    for_performance_diagram=False):
    """Generates polygon for confidence interval.

    P = number of points in bottom curve = number of points in top curve

    :param x_coords_bottom: length-P numpy with x-coordinates of bottom curve
        (lower end of confidence interval).
    :param y_coords_bottom: Same but for y-coordinates.
    :param x_coords_top: length-P numpy with x-coordinates of top curve (upper
        end of confidence interval).
    :param y_coords_top: Same but for y-coordinates.
    :param for_performance_diagram: Boolean flag.  If True, confidence interval
        is for a performance diagram, which means that coordinates will be
        sorted in a slightly different way.
    :return: polygon_object: Instance of `shapely.geometry.Polygon`.
    """

    # A point on either curve is kept only if both of its coordinates are
    # non-NaN.
    nan_flags_top = numpy.logical_or(numpy.isnan(x_coords_top),
                                     numpy.isnan(y_coords_top))
    real_indices_top = numpy.where(numpy.invert(nan_flags_top))[0]

    nan_flags_bottom = numpy.logical_or(numpy.isnan(x_coords_bottom),
                                        numpy.isnan(y_coords_bottom))
    real_indices_bottom = numpy.where(numpy.invert(nan_flags_bottom))[0]

    if for_performance_diagram:
        # Sort the top curve by increasing y and the bottom curve by
        # decreasing y.  NOTE: the NaN filter is applied before the sort, so
        # sort indices refer to the filtered arrays.
        y_coords_top = y_coords_top[real_indices_top]
        sort_indices_top = numpy.argsort(y_coords_top)
        y_coords_top = y_coords_top[sort_indices_top]
        x_coords_top = x_coords_top[real_indices_top][sort_indices_top]

        y_coords_bottom = y_coords_bottom[real_indices_bottom]
        sort_indices_bottom = numpy.argsort(-y_coords_bottom)
        y_coords_bottom = y_coords_bottom[sort_indices_bottom]
        x_coords_bottom = x_coords_bottom[real_indices_bottom][
            sort_indices_bottom]
    else:
        # Sort the top curve by decreasing x and the bottom curve by
        # increasing x (same idea as above, but keyed on x instead of y).
        x_coords_top = x_coords_top[real_indices_top]
        sort_indices_top = numpy.argsort(-x_coords_top)
        x_coords_top = x_coords_top[sort_indices_top]
        y_coords_top = y_coords_top[real_indices_top][sort_indices_top]

        x_coords_bottom = x_coords_bottom[real_indices_bottom]
        sort_indices_bottom = numpy.argsort(x_coords_bottom)
        x_coords_bottom = x_coords_bottom[sort_indices_bottom]
        y_coords_bottom = y_coords_bottom[real_indices_bottom][
            sort_indices_bottom]

    # Concatenate top curve, bottom curve, and the first top-curve vertex
    # (repeated to close the ring).
    polygon_x_coords = numpy.concatenate(
        (x_coords_top, x_coords_bottom, numpy.array([x_coords_top[0]])))
    polygon_y_coords = numpy.concatenate(
        (y_coords_top, y_coords_bottom, numpy.array([y_coords_top[0]])))

    return polygons.vertex_arrays_to_polygon_object(polygon_x_coords,
                                                    polygon_y_coords)
    def test_vertex_arrays_to_polygon_object(self):
        """Ensures correct output from vertex_arrays_to_polygon_object."""

        # Build a polygon with two holes from the module-level vertex-array
        # fixtures.
        this_polygon_object = polygons.vertex_arrays_to_polygon_object(
            EXTERIOR_VERTEX_X_METRES,
            EXTERIOR_VERTEX_Y_METRES,
            hole_x_coords_list=[HOLE1_VERTEX_X_METRES, HOLE2_VERTEX_X_METRES],
            hole_y_coords_list=[HOLE1_VERTEX_Y_METRES, HOLE2_VERTEX_Y_METRES])

        # Compare against the expected pre-built polygon, up to the given
        # number of decimal places.
        self.assertTrue(
            this_polygon_object.almost_equals(POLYGON_OBJECT_2HOLES_XY_METRES,
                                              decimal=TOLERANCE_DECIMAL_PLACE))
def _grid_cell_to_polygon(grid_row, grid_column):
    """Converts grid cell from single point to polygon.

    :param grid_row: Row index.
    :param grid_column: Column index.
    :return: polygon_object: Instance of `shapely.geometry.Polygon`.
    """

    # Half-cell offsets trace the four corners, repeating the first corner to
    # close the ring.
    row_offsets = numpy.array([-0.5, -0.5, 0.5, 0.5, -0.5])
    column_offsets = numpy.array([-0.5, 0.5, 0.5, -0.5, -0.5])

    # Columns play the role of x-coordinates, rows of y-coordinates.
    return polygons.vertex_arrays_to_polygon_object(
        exterior_x_coords=grid_column + column_offsets,
        exterior_y_coords=grid_row + row_offsets)
    def test_vertex_arrays_to_polygon_object(self):
        """Ensures correct output from vertex_arrays_to_polygon_object.

        This is not strictly a unit test.  vertex_arrays_to_polygon_object is
        used to convert to a polygon object, and then
        polygon_object_to_vertex_arrays is used to convert back to vertex
        arrays.  The output of polygon_object_to_vertex_arrays is compared with
        the input to vertex_arrays_to_polygon_object, and both sets of vertex
        arrays must be equal.
        """

        # Forward conversion: vertex arrays --> polygon object.
        this_polygon_object = polygons.vertex_arrays_to_polygon_object(
            EXTERIOR_VERTEX_X_METRES,
            EXTERIOR_VERTEX_Y_METRES,
            hole_x_coords_list=[HOLE1_VERTEX_X_METRES, HOLE2_VERTEX_X_METRES],
            hole_y_coords_list=[HOLE1_VERTEX_Y_METRES, HOLE2_VERTEX_Y_METRES])

        # Backward conversion: polygon object --> vertex arrays.
        this_vertex_dict = polygons.polygon_object_to_vertex_arrays(
            this_polygon_object)

        # Exterior coordinates must survive the round trip.
        self.assertTrue(
            numpy.allclose(this_vertex_dict[polygons.EXTERIOR_X_COLUMN],
                           EXTERIOR_VERTEX_X_METRES,
                           atol=TOLERANCE))
        self.assertTrue(
            numpy.allclose(this_vertex_dict[polygons.EXTERIOR_Y_COLUMN],
                           EXTERIOR_VERTEX_Y_METRES,
                           atol=TOLERANCE))

        # Both holes must survive the round trip, in the same order.
        self.assertTrue(
            len(this_vertex_dict[polygons.HOLE_X_COLUMN]) == NUM_HOLES)
        self.assertTrue(
            numpy.allclose(this_vertex_dict[polygons.HOLE_X_COLUMN][0],
                           HOLE1_VERTEX_X_METRES,
                           atol=TOLERANCE))
        self.assertTrue(
            numpy.allclose(this_vertex_dict[polygons.HOLE_Y_COLUMN][0],
                           HOLE1_VERTEX_Y_METRES,
                           atol=TOLERANCE))

        self.assertTrue(
            numpy.allclose(this_vertex_dict[polygons.HOLE_X_COLUMN][1],
                           HOLE2_VERTEX_X_METRES,
                           atol=TOLERANCE))
        self.assertTrue(
            numpy.allclose(this_vertex_dict[polygons.HOLE_Y_COLUMN][1],
                           HOLE2_VERTEX_Y_METRES,
                           atol=TOLERANCE))
# Exemple #9
def plot_storm_outline_filled(basemap_object,
                              axes_object,
                              polygon_object_latlng,
                              line_colour=DEFAULT_POLYGON_LINE_COLOUR,
                              line_width=DEFAULT_POLYGON_LINE_WIDTH,
                              fill_colour=DEFAULT_POLYGON_FILL_COLOUR,
                              opacity=DEFAULT_POLYGON_FILL_OPACITY):
    """Plots storm outline (or buffer around storm outline) as filled polygon.

    :param basemap_object: Instance of `mpl_toolkits.basemap.Basemap`.
    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param polygon_object_latlng: `shapely.geometry.Polygon` object with
        vertices in lat-long coordinates.
    :param line_colour: Colour of polygon edge (in any format accepted by
        `matplotlib.colors`).
    :param line_width: Width of polygon edge.
    :param fill_colour: Colour of polygon interior.
    :param opacity: Opacity of polygon fill (in range 0...1).
    """

    vertex_dict = polygons.polygon_object_to_vertex_arrays(
        polygon_object_latlng)

    # Calling the basemap object projects lat-long coords into the map's x-y
    # coordinate system.
    exterior_x_coords_metres, exterior_y_coords_metres = basemap_object(
        vertex_dict[polygons.EXTERIOR_X_COLUMN],
        vertex_dict[polygons.EXTERIOR_Y_COLUMN])

    num_holes = len(vertex_dict[polygons.HOLE_X_COLUMN])

    # Project each hole the same way.
    these_hole_coord_pairs = [
        basemap_object(vertex_dict[polygons.HOLE_X_COLUMN][i],
                       vertex_dict[polygons.HOLE_Y_COLUMN][i])
        for i in range(num_holes)
    ]
    x_coords_by_hole_metres = [pair[0] for pair in these_hole_coord_pairs]
    y_coords_by_hole_metres = [pair[1] for pair in these_hole_coord_pairs]

    polygon_object_xy = polygons.vertex_arrays_to_polygon_object(
        exterior_x_coords=exterior_x_coords_metres,
        exterior_y_coords=exterior_y_coords_metres,
        hole_x_coords_list=x_coords_by_hole_metres,
        hole_y_coords_list=y_coords_by_hole_metres)

    axes_object.add_patch(
        PolygonPatch(polygon_object_xy,
                     lw=line_width,
                     ec=line_colour,
                     fc=fill_colour,
                     alpha=opacity))
# Exemple #10
def _vertex_list_to_polygon_list(vertex_rows, vertex_columns):
    """This method is the inverse of `_polygon_list_to_vertex_list`.

    P = number of polygons

    :param vertex_rows: See doc for `_polygon_list_to_vertex_list`.
    :param vertex_columns: Same.
    :return: polygon_objects_grid_coords: Same.
    :return: polygon_to_first_vertex_indices: length-P numpy array of indices.
        If polygon_to_first_vertex_indices[j] = i, the first vertex in the
        [j]th polygon is the [i]th vertex in the input arrays.
    :raises: ValueError: if row and column lists have NaN's at different
        locations.
    """

    # Handle empty input up front.
    if len(vertex_rows) == 0:
        return [], numpy.array([], dtype=int)

    # NaN entries are polygon separators; the two arrays must agree on where
    # they are.
    nan_row_indices = numpy.where(numpy.isnan(vertex_rows))[0]
    nan_column_indices = numpy.where(numpy.isnan(vertex_columns))[0]

    if not numpy.array_equal(nan_row_indices, nan_column_indices):
        raise ValueError(
            'Row ({0:s}) and column ({1:s}) lists have NaNs at different '
            'locations.'.format(
                str(nan_row_indices), str(nan_column_indices)))

    # Each polygon starts right after a NaN separator (or at index 0).
    polygon_to_first_vertex_indices = numpy.concatenate(
        (numpy.array([0], dtype=int), nan_row_indices + 1))

    # Split on NaN and build one polygon per (rows, columns) segment.
    polygon_objects_grid_coords = [
        polygons.vertex_arrays_to_polygon_object(
            exterior_x_coords=these_columns, exterior_y_coords=these_rows)
        for these_rows, these_columns in zip(
            general_utils.split_array_by_nan(vertex_rows),
            general_utils.split_array_by_nan(vertex_columns))
    ]

    return polygon_objects_grid_coords, polygon_to_first_vertex_indices
def plot_filled_polygon(basemap_object=None, axes_object=None,
                        vertex_latitudes_deg=None, vertex_longitudes_deg=None,
                        line_colour=DEFAULT_POLY_LINE_COLOUR,
                        line_width=DEFAULT_POLY_LINE_WIDTH,
                        fill_colour=DEFAULT_POLY_FILL_COLOUR,
                        opacity=DEFAULT_POLY_FILL_OPACITY):
    """Plots filled polygon (either storm object or buffer around storm object).

    :param basemap_object: Instance of `mpl_toolkits.basemap.Basemap`.
    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param vertex_latitudes_deg: length-V numpy array of latitudes (deg N).
    :param vertex_longitudes_deg: length-V numpy array of longitudes (deg E).
    :param line_colour: Colour of polygon edge (in any format accepted by
        `matplotlib.colors`).
    :param line_width: Width of polygon edge.
    :param fill_colour: Colour of polygon interior.
    :param opacity: Opacity of polygon fill (in range 0...1).
    """

    # TODO(thunderhoser): input should be a `shapely.geometry.Polygon` object.

    vertex_dict = polygons.separate_exterior_and_holes(
        vertex_longitudes_deg, vertex_latitudes_deg)

    # Calling the basemap object projects lat-long into map x-y coords.
    exterior_x_metres, exterior_y_metres = basemap_object(
        vertex_dict[polygons.EXTERIOR_X_COLUMN],
        vertex_dict[polygons.EXTERIOR_Y_COLUMN])

    num_holes = len(vertex_dict[polygons.HOLE_X_COLUMN])
    hole_list_x_metres = []
    hole_list_y_metres = []

    # NOTE(review): each hole's vertices are reversed (numpy.flipud) before
    # projection -- presumably to fix winding order; confirm against
    # `separate_exterior_and_holes`.
    for i in range(num_holes):
        these_x_metres, these_y_metres = basemap_object(
            numpy.flipud(vertex_dict[polygons.HOLE_X_COLUMN][i]),
            numpy.flipud(vertex_dict[polygons.HOLE_Y_COLUMN][i]))

        hole_list_x_metres.append(these_x_metres)
        hole_list_y_metres.append(these_y_metres)

    polygon_object = polygons.vertex_arrays_to_polygon_object(
        exterior_x_metres, exterior_y_metres,
        hole_x_coords_list=hole_list_x_metres,
        hole_y_coords_list=hole_list_y_metres)

    axes_object.add_patch(PolygonPatch(
        polygon_object, lw=line_width, ec=line_colour, fc=fill_colour,
        alpha=opacity))
def erode_boundary(latitudes_deg, longitudes_deg, erosion_distance_metres):
    """Erodes boundary.

    Erosion is the same thing as applying a negative buffer distance.  The new
    boundary will be contained inside the old boundary.

    :param latitudes_deg: See doc for `_check_boundary`.
    :param longitudes_deg: Same.
    :param erosion_distance_metres: Erosion distance.
    :return: latitudes_deg: Eroded version of input.
    :return: longitudes_deg: Eroded version of input.
    """

    longitudes_deg = _check_boundary(latitudes_deg=latitudes_deg,
                                     longitudes_deg=longitudes_deg)
    error_checking.assert_is_greater(erosion_distance_metres, 0.)

    # Project boundary to x-y so erosion can be done in metres.
    polygon_object_latlng = polygons.vertex_arrays_to_polygon_object(
        exterior_x_coords=longitudes_deg, exterior_y_coords=latitudes_deg)
    polygon_object_xy, projection_object = polygons.project_latlng_to_xy(
        polygon_object_latlng=polygon_object_latlng)

    # Erosion = buffering with a negative distance.
    polygon_object_xy = polygon_object_xy.buffer(
        -erosion_distance_metres, join_style=shapely.geometry.JOIN_STYLE.round)

    # Erosion may split the boundary into multiple pieces; keep only the
    # first piece of the resulting MultiPolygon.
    if 'MultiPolygon' in str(type(polygon_object_xy)):
        polygon_object_xy = list(polygon_object_xy)[0]

    # Project the eroded boundary back to lat-long.
    polygon_object_latlng = polygons.project_xy_to_latlng(
        polygon_object_xy_metres=polygon_object_xy,
        projection_object=projection_object)
    polygon_dict_latlng = polygons.polygon_object_to_vertex_arrays(
        polygon_object_latlng)

    eroded_latitudes_deg = polygon_dict_latlng[polygons.EXTERIOR_Y_COLUMN]
    eroded_longitudes_deg = polygon_dict_latlng[polygons.EXTERIOR_X_COLUMN]
    eroded_longitudes_deg = _check_boundary(
        latitudes_deg=eroded_latitudes_deg,
        longitudes_deg=eroded_longitudes_deg)

    return eroded_latitudes_deg, eroded_longitudes_deg
def create_distance_buffers(storm_object_table, min_distances_metres,
                            max_distances_metres):
    """Creates one or more distance buffers around each storm object.

    K = number of buffers

    :param storm_object_table: pandas DataFrame with the following columns.
        Each row is one storm object.
    storm_object_table.centroid_latitude_deg: Latitude (deg N) of storm-object
        centroid.
    storm_object_table.centroid_longitude_deg: Longitude (deg E) of storm-object
        centroid.
    storm_object_table.polygon_object_latlng_deg: Instance of
        `shapely.geometry.Polygon`, with x-coords in longitude (deg E) and
        y-coords in latitude (deg N).

    :param min_distances_metres: length-K numpy array of minimum distances.  If
        the storm object is inside the [k]th buffer -- i.e., the [k]th buffer
        has no minimum distance -- then min_distances_metres[k] should be NaN.
    :param max_distances_metres: length-K numpy array of max distances.
    :return: storm_object_table: Same as input but with K additional columns
        (one per distance buffer).  Column names are generated by
        `buffer_to_column_name`, and each value in these columns is a
        `shapely.geometry.Polygon` object, with x-coords in longitude (deg E) and
        y-coords in latitude (deg N).
    """

    # The min- and max-distance arrays must have the same length (one pair
    # per buffer).
    num_buffers = len(min_distances_metres)
    these_expected_dim = numpy.array([num_buffers], dtype=int)
    error_checking.assert_is_numpy_array(max_distances_metres,
                                         exact_dimensions=these_expected_dim)

    # Center the projection on the centroid of all storm centroids --
    # presumably to limit projection distortion over the domain.
    global_centroid_lat_deg, global_centroid_lng_deg = (
        geodetic_utils.get_latlng_centroid(
            latitudes_deg=storm_object_table[CENTROID_LATITUDE_COLUMN].values,
            longitudes_deg=storm_object_table[CENTROID_LONGITUDE_COLUMN].values
        ))

    projection_object = projections.init_azimuthal_equidistant_projection(
        central_latitude_deg=global_centroid_lat_deg,
        central_longitude_deg=global_centroid_lng_deg)

    # Add one new column per buffer, initialized with NaN placeholders
    # (object dtype, so polygon objects can be stored later).
    num_storm_objects = len(storm_object_table.index)
    object_array = numpy.full(num_storm_objects, numpy.nan, dtype=object)
    buffer_column_names = [''] * num_buffers

    for j in range(num_buffers):
        buffer_column_names[j] = buffer_to_column_name(
            min_distance_metres=min_distances_metres[j],
            max_distance_metres=max_distances_metres[j])

        storm_object_table = storm_object_table.assign(
            **{buffer_column_names[j]: object_array})

    # For each storm object: project its outline to x-y, buffer it in metres,
    # then project the buffer back to lat-long and store it.
    for i in range(num_storm_objects):
        this_orig_vertex_dict_latlng_deg = (
            polygons.polygon_object_to_vertex_arrays(
                storm_object_table[LATLNG_POLYGON_COLUMN].values[i]))

        these_orig_x_metres, these_orig_y_metres = (
            projections.project_latlng_to_xy(
                latitudes_deg=this_orig_vertex_dict_latlng_deg[
                    polygons.EXTERIOR_Y_COLUMN],
                longitudes_deg=this_orig_vertex_dict_latlng_deg[
                    polygons.EXTERIOR_X_COLUMN],
                projection_object=projection_object))

        for j in range(num_buffers):
            # Apply the [j]th min/max buffer distances in x-y space.
            this_buffer_poly_object_xy_metres = polygons.buffer_simple_polygon(
                vertex_x_metres=these_orig_x_metres,
                vertex_y_metres=these_orig_y_metres,
                min_buffer_dist_metres=min_distances_metres[j],
                max_buffer_dist_metres=max_distances_metres[j])

            this_buffer_vertex_dict = polygons.polygon_object_to_vertex_arrays(
                this_buffer_poly_object_xy_metres)

            # Project the buffered exterior back to lat-long, overwriting the
            # x-y coords in the vertex dictionary in place.
            (this_buffer_vertex_dict[polygons.EXTERIOR_Y_COLUMN],
             this_buffer_vertex_dict[polygons.EXTERIOR_X_COLUMN]
             ) = projections.project_xy_to_latlng(
                 x_coords_metres=this_buffer_vertex_dict[
                     polygons.EXTERIOR_X_COLUMN],
                 y_coords_metres=this_buffer_vertex_dict[
                     polygons.EXTERIOR_Y_COLUMN],
                 projection_object=projection_object)

            this_num_holes = len(
                this_buffer_vertex_dict[polygons.HOLE_X_COLUMN])

            # Same back-projection for each hole of the buffer polygon.
            for k in range(this_num_holes):
                (this_buffer_vertex_dict[polygons.HOLE_Y_COLUMN][k],
                 this_buffer_vertex_dict[polygons.HOLE_X_COLUMN][k]
                 ) = projections.project_xy_to_latlng(
                     x_coords_metres=this_buffer_vertex_dict[
                         polygons.HOLE_X_COLUMN][k],
                     y_coords_metres=this_buffer_vertex_dict[
                         polygons.HOLE_Y_COLUMN][k],
                     projection_object=projection_object)

            # Rebuild the buffer polygon in lat-long coords and store it in
            # the [j]th new column for the [i]th storm object.
            this_buffer_poly_object_latlng_deg = (
                polygons.vertex_arrays_to_polygon_object(
                    exterior_x_coords=this_buffer_vertex_dict[
                        polygons.EXTERIOR_X_COLUMN],
                    exterior_y_coords=this_buffer_vertex_dict[
                        polygons.EXTERIOR_Y_COLUMN],
                    hole_x_coords_list=this_buffer_vertex_dict[
                        polygons.HOLE_X_COLUMN],
                    hole_y_coords_list=this_buffer_vertex_dict[
                        polygons.HOLE_Y_COLUMN]))

            storm_object_table[buffer_column_names[j]].values[i] = (
                this_buffer_poly_object_latlng_deg)

    return storm_object_table
# Exemple #14
def read_polygons_from_netcdf(netcdf_file_name,
                              metadata_dict=None,
                              spc_date_unix_sec=None,
                              tracking_start_time_unix_sec=None,
                              tracking_end_time_unix_sec=None,
                              raise_error_if_fails=True):
    """Reads storm polygons (outlines of storm cells) from NetCDF file.

    P = number of grid points in storm cell (different for each storm cell)
    V = number of vertices in storm polygon (different for each storm cell)

    If file cannot be opened, returns None.

    :param netcdf_file_name: Path to input file.
    :param metadata_dict: Dictionary with metadata for NetCDF file, created by
        `radar_io.read_metadata_from_raw_file`.
    :param spc_date_unix_sec: SPC date in Unix format.
    :param tracking_start_time_unix_sec: Start time for tracking period.  This
        can be found by `get_start_end_times_for_spc_date`.
    :param tracking_end_time_unix_sec: End time for tracking period.  This can
        be found by `get_start_end_times_for_spc_date`.
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be
        opened, this method will raise an error.
    :return: polygon_table: If file cannot be opened and raise_error_if_fails =
        False, this is None.  Otherwise, it is a pandas DataFrame with the
        following columns.
    polygon_table.storm_id: String ID for storm cell.
    polygon_table.unix_time_sec: Time in Unix format.
    polygon_table.spc_date_unix_sec: SPC date in Unix format.
    polygon_table.tracking_start_time_unix_sec: Start time for tracking period.
    polygon_table.tracking_end_time_unix_sec: End time for tracking period.
    polygon_table.centroid_lat_deg: Latitude at centroid of storm cell (deg N).
    polygon_table.centroid_lng_deg: Longitude at centroid of storm cell (deg E).
    polygon_table.grid_point_latitudes_deg: length-P numpy array with latitudes
        (deg N) of grid points in storm cell.
    polygon_table.grid_point_longitudes_deg: length-P numpy array with
        longitudes (deg E) of grid points in storm cell.
    polygon_table.grid_point_rows: length-P numpy array with row indices (all
        integers) of grid points in storm cell.
    polygon_table.grid_point_columns: length-P numpy array with column indices
        (all integers) of grid points in storm cell.
    polygon_table.polygon_object_latlng: Instance of `shapely.geometry.Polygon`
        with vertices in lat-long coordinates.
    polygon_table.polygon_object_rowcol: Instance of `shapely.geometry.Polygon`
        with vertices in row-column coordinates.
    """

    # Validate scalar inputs before touching the file.
    error_checking.assert_file_exists(netcdf_file_name)
    error_checking.assert_is_integer(spc_date_unix_sec)
    error_checking.assert_is_not_nan(spc_date_unix_sec)
    error_checking.assert_is_integer(tracking_start_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_start_time_unix_sec)
    error_checking.assert_is_integer(tracking_end_time_unix_sec)
    error_checking.assert_is_not_nan(tracking_end_time_unix_sec)

    # If raise_error_if_fails is False and the file cannot be opened,
    # open_netcdf returns None and so does this method.
    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)
    if netcdf_dataset is None:
        return None

    # In this file type the radar "field" is a numeric storm-ID grid;
    # storm_id_var_name is the NetCDF variable holding those IDs.
    storm_id_var_name = metadata_dict[radar_io.FIELD_NAME_COLUMN]
    storm_id_var_name_orig = metadata_dict[radar_io.FIELD_NAME_COLUMN_ORIG]
    num_values = len(netcdf_dataset.variables[radar_io.GRID_ROW_COLUMN_ORIG])

    if num_values == 0:
        # No storm objects at this time: build an empty sparse grid so the
        # rest of the pipeline still produces a (zero-row) table.
        sparse_grid_dict = {
            radar_io.GRID_ROW_COLUMN: numpy.array([], dtype=int),
            radar_io.GRID_COLUMN_COLUMN: numpy.array([], dtype=int),
            radar_io.NUM_GRID_CELL_COLUMN: numpy.array([], dtype=int),
            storm_id_var_name: numpy.array([], dtype=int)
        }
    else:
        sparse_grid_dict = {
            radar_io.GRID_ROW_COLUMN:
            netcdf_dataset.variables[radar_io.GRID_ROW_COLUMN_ORIG][:],
            radar_io.GRID_COLUMN_COLUMN:
            netcdf_dataset.variables[radar_io.GRID_COLUMN_COLUMN_ORIG][:],
            radar_io.NUM_GRID_CELL_COLUMN:
            netcdf_dataset.variables[radar_io.NUM_GRID_CELL_COLUMN_ORIG][:],
            storm_id_var_name:
            netcdf_dataset.variables[storm_id_var_name_orig][:]
        }

    netcdf_dataset.close()

    # Expand the sparse (run-length-encoded) grid to a full 2-D matrix of
    # numeric storm IDs, then convert to per-storm lists of grid points.
    sparse_grid_table = pandas.DataFrame.from_dict(sparse_grid_dict)
    numeric_storm_id_matrix, _, _ = (radar_s2f.sparse_to_full_grid(
        sparse_grid_table, metadata_dict))
    polygon_table = _storm_id_matrix_to_coord_lists(numeric_storm_id_matrix)

    # All storm objects in this file share the same valid time, SPC date, and
    # tracking period, so each metadata column is a constant array.
    num_storms = len(polygon_table.index)
    unix_times_sec = numpy.full(num_storms,
                                metadata_dict[radar_io.UNIX_TIME_COLUMN],
                                dtype=int)
    spc_dates_unix_sec = numpy.full(num_storms, spc_date_unix_sec, dtype=int)
    tracking_start_times_unix_sec = numpy.full(num_storms,
                                               tracking_start_time_unix_sec,
                                               dtype=int)
    tracking_end_times_unix_sec = numpy.full(num_storms,
                                             tracking_end_time_unix_sec,
                                             dtype=int)

    # Suffix each storm ID with the SPC date, making IDs unique across dates.
    spc_date_string = time_conversion.time_to_spc_date_string(
        spc_date_unix_sec)
    storm_ids = _append_spc_date_to_storm_ids(
        polygon_table[tracking_io.STORM_ID_COLUMN].values, spc_date_string)

    # Placeholder columns, overwritten row by row in the loop below.
    # nested_array: selecting the same column twice and calling tolist() yields
    # one 2-element list per row, which lets a DataFrame column hold an
    # arbitrary array per row.
    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
    nested_array = polygon_table[[
        tracking_io.STORM_ID_COLUMN, tracking_io.STORM_ID_COLUMN
    ]].values.tolist()

    argument_dict = {
        tracking_io.STORM_ID_COLUMN: storm_ids,
        tracking_io.TIME_COLUMN: unix_times_sec,
        tracking_io.SPC_DATE_COLUMN: spc_dates_unix_sec,
        tracking_io.TRACKING_START_TIME_COLUMN: tracking_start_times_unix_sec,
        tracking_io.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec,
        tracking_io.CENTROID_LAT_COLUMN: simple_array,
        tracking_io.CENTROID_LNG_COLUMN: simple_array,
        tracking_io.GRID_POINT_LAT_COLUMN: nested_array,
        tracking_io.GRID_POINT_LNG_COLUMN: nested_array,
        tracking_io.POLYGON_OBJECT_LATLNG_COLUMN: object_array,
        tracking_io.POLYGON_OBJECT_ROWCOL_COLUMN: object_array
    }
    polygon_table = polygon_table.assign(**argument_dict)

    for i in range(num_storms):
        # Turn this storm's grid points into polygon vertices (row-column).
        these_vertex_rows, these_vertex_columns = (
            polygons.grid_points_in_poly_to_vertices(
                polygon_table[tracking_io.GRID_POINT_ROW_COLUMN].values[i],
                polygon_table[tracking_io.GRID_POINT_COLUMN_COLUMN].values[i]))

        # Regenerate grid points from the simple polygon — presumably this
        # canonicalizes the point set to match the vertex outline (TODO
        # confirm against `polygons.simple_polygon_to_grid_points`).
        (polygon_table[tracking_io.GRID_POINT_ROW_COLUMN].values[i],
         polygon_table[tracking_io.GRID_POINT_COLUMN_COLUMN].values[i]) = (
             polygons.simple_polygon_to_grid_points(these_vertex_rows,
                                                    these_vertex_columns))

        # Convert the (new) grid points from row-column to lat-long.
        (polygon_table[tracking_io.GRID_POINT_LAT_COLUMN].values[i],
         polygon_table[tracking_io.GRID_POINT_LNG_COLUMN].values[i]) = (
             radar_io.rowcol_to_latlng(
                 polygon_table[tracking_io.GRID_POINT_ROW_COLUMN].values[i],
                 polygon_table[tracking_io.GRID_POINT_COLUMN_COLUMN].values[i],
                 nw_grid_point_lat_deg=metadata_dict[
                     radar_io.NW_GRID_POINT_LAT_COLUMN],
                 nw_grid_point_lng_deg=metadata_dict[
                     radar_io.NW_GRID_POINT_LNG_COLUMN],
                 lat_spacing_deg=metadata_dict[radar_io.LAT_SPACING_COLUMN],
                 lng_spacing_deg=metadata_dict[radar_io.LNG_SPACING_COLUMN]))

        # Convert the polygon vertices themselves to lat-long.
        these_vertex_lat_deg, these_vertex_lng_deg = radar_io.rowcol_to_latlng(
            these_vertex_rows,
            these_vertex_columns,
            nw_grid_point_lat_deg=metadata_dict[
                radar_io.NW_GRID_POINT_LAT_COLUMN],
            nw_grid_point_lng_deg=metadata_dict[
                radar_io.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_io.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_io.LNG_SPACING_COLUMN])

        (polygon_table[tracking_io.CENTROID_LAT_COLUMN].values[i],
         polygon_table[tracking_io.CENTROID_LNG_COLUMN].values[i]) = (
             polygons.get_latlng_centroid(these_vertex_lat_deg,
                                          these_vertex_lng_deg))

        # Build shapely polygons.  Note the argument order: x = columns /
        # longitudes first, y = rows / latitudes second.
        polygon_table[tracking_io.POLYGON_OBJECT_ROWCOL_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(these_vertex_columns,
                                                     these_vertex_rows))
        polygon_table[tracking_io.POLYGON_OBJECT_LATLNG_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(these_vertex_lng_deg,
                                                     these_vertex_lat_deg))

    return polygon_table
# Expected distance (metres) from each storm object (keyed by index) to the
# warning polygon.  sqrt(24.25) = sqrt(4.5^2 + 2^2) — presumably the Euclidean
# offset of storm 8 from the polygon; verify against the test it feeds.
STORM_TO_WARNING_DIST_METRES = {
    0: 0.,
    1: 4.5,
    2: 0.,
    3: 1.5,
    4: 0.,
    5: 4.5,
    6: 0.,
    7: 1.5,
    8: numpy.sqrt(24.25),
}

# Warning polygon: a 20 x 6 rectangle centred at the origin (first vertex
# repeated to close the ring).
THESE_X_METRES = numpy.array([-10, -10, 10, 10, -10], dtype=float)
THESE_Y_METRES = numpy.array([-3, 3, 3, -3, -3], dtype=float)
WARNING_POLYGON_OBJECT_XY = polygons.vertex_arrays_to_polygon_object(
    exterior_x_coords=THESE_X_METRES, exterior_y_coords=THESE_Y_METRES)

# The following constants are used to test _link_one_warning.
THESE_START_TIMES_UNIX_SEC = numpy.array([4, 3], dtype=int)
THESE_END_TIMES_UNIX_SEC = numpy.array([6, 5], dtype=int)

# First storm polygon: a 2 x 1 rectangle.
THESE_X_METRES = numpy.array([274, 274, 276, 276, 274], dtype=float)
THESE_Y_METRES = numpy.array([59.5, 60.5, 60.5, 59.5, 59.5])
FIRST_POLYGON_OBJECT = polygons.vertex_arrays_to_polygon_object(
    exterior_x_coords=THESE_X_METRES, exterior_y_coords=THESE_Y_METRES)

# Second storm polygon: same shape, shifted 1 m west (reuses THESE_Y_METRES).
THESE_X_METRES = numpy.array([273, 273, 275, 275, 273], dtype=float)
SECOND_POLYGON_OBJECT = polygons.vertex_arrays_to_polygon_object(
    exterior_x_coords=THESE_X_METRES, exterior_y_coords=THESE_Y_METRES)

THIS_DICT = {
            lat_spacing_deg=LATITUDE_SPACING_DEG,
            lng_spacing_deg=LONGITUDE_SPACING_DEG))

    (THIS_CENTROID_LAT_DEG,
     THIS_CENTROID_LNG_DEG) = geodetic_utils.get_latlng_centroid(
         latitudes_deg=THESE_VERTEX_LATITUDES_DEG,
         longitudes_deg=THESE_VERTEX_LONGITUDES_DEG)

    STORM_OBJECT_TABLE_SMALL_SCALE[
        tracking_utils.CENTROID_LAT_COLUMN].values[i] = THIS_CENTROID_LAT_DEG
    STORM_OBJECT_TABLE_SMALL_SCALE[
        tracking_utils.CENTROID_LNG_COLUMN].values[i] = THIS_CENTROID_LNG_DEG

    STORM_OBJECT_TABLE_SMALL_SCALE[
        tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(
                THESE_VERTEX_LONGITUDES_DEG, THESE_VERTEX_LATITUDES_DEG))
    STORM_OBJECT_TABLE_SMALL_SCALE[
        tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN].values[i] = (
            polygons.vertex_arrays_to_polygon_object(THESE_VERTEX_COLUMNS,
                                                     THESE_VERTEX_ROWS))

# Flattened grid-point indices for three storms ("i", "ii", "iii"), four grid
# points each.  FLATTENED_STORM_IDS is parallel to
# FLATTENED_GRID_POINT_INDICES (one ID per grid point).
FLAT_GRID_POINT_INDICES_STORM_I = numpy.array([9, 10, 17, 18], dtype=int)
FLAT_GRID_POINT_INDICES_STORM_II = numpy.array([12, 13, 20, 21], dtype=int)
FLAT_GRID_POINT_INDICES_STORM_III = numpy.array([36, 37, 44, 45], dtype=int)
FLATTENED_GRID_POINT_INDICES = numpy.concatenate(
    (FLAT_GRID_POINT_INDICES_STORM_I, FLAT_GRID_POINT_INDICES_STORM_II,
     FLAT_GRID_POINT_INDICES_STORM_III))
FLATTENED_STORM_IDS = [
    'i', 'i', 'i', 'i', 'ii', 'ii', 'ii', 'ii', 'iii', 'iii', 'iii', 'iii'
]
# Example #17
# 0
def get_stats_for_storm_objects(
        storm_object_table,
        statistic_names=DEFAULT_STATISTIC_NAMES,
        grid_spacing_for_binary_matrix_metres=GRID_SPACING_FOR_BINARY_MATRIX_DEFAULT_METRES,
        num_vertices_in_smoothing_half_window=NUM_VERTICES_IN_SMOOTHING_HALF_WINDOW_DEFAULT,
        num_smoothing_iterations=NUM_SMOOTHING_ITERS_DEFAULT):
    """Computes shape statistics for one or more storm objects.

    K = number of statistics

    :param storm_object_table: pandas DataFrame with columns documented in
        `storm_tracking_io.write_processed_file`.  May contain additional
        columns.
    :param statistic_names: length-K list of statistics to compute.
    :param grid_spacing_for_binary_matrix_metres: See documentation for
        _xy_polygon_to_binary_matrix.
    :param num_vertices_in_smoothing_half_window: See documentation for
        `smoothing_via_iterative_averaging.sia_for_closed_polygon`.
    :param num_smoothing_iterations: See documentation for
        `smoothing_via_iterative_averaging.sia_for_closed_polygon`.
    :return: storm_shape_statistic_table: pandas DataFrame with 2 + K columns,
        where the last K columns are shape statistics.  Names of these columns
        come from the input list statistic_names.  The first 2 columns are
        listed below.
    storm_shape_statistic_table.storm_id: String ID for storm cell.  Same as
        input column `storm_object_table.storm_id`.
    storm_shape_statistic_table.unix_time_sec: Valid time.  Same as input column
        `storm_object_table.unix_time_sec`.
    """

    _check_statistic_names(statistic_names)

    # Split the requested statistics into the three families, each computed by
    # a different backend below.
    basic_stat_names = _get_basic_statistic_names(statistic_names)
    region_property_names = _get_region_property_names(statistic_names)
    curvature_based_stat_names = _get_curvature_based_stat_names(
        statistic_names)

    num_storm_objects = len(storm_object_table.index)

    argument_dict = {}
    for this_name in statistic_names:
        # Give each new column its own NaN array (rather than sharing one
        # array across columns), so in-place writes to one statistic column
        # cannot leak into another.
        argument_dict.update(
            {this_name: numpy.full(num_storm_objects, numpy.nan)})
    storm_object_table = storm_object_table.assign(**argument_dict)

    for i in range(num_storm_objects):
        if numpy.mod(i, REPORT_EVERY_N_STORM_OBJECTS) == 0 and i > 0:
            # BUG FIX: the original chained `.format(...)` onto the return
            # value of print(), which is None in Python 3.
            print((
                'Have computed shape statistics for {0:d} of {1:d} storm '
                'objects...'
            ).format(i, num_storm_objects))

        # Project this storm outline from lat-long into local x-y coordinates
        # centred on the storm's centroid.
        this_polygon_object_xy = _project_polygon_latlng_to_xy(
            storm_object_table[
                tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN].values[i],
            centroid_latitude_deg=storm_object_table[
                tracking_utils.CENTROID_LAT_COLUMN].values[i],
            centroid_longitude_deg=storm_object_table[
                tracking_utils.CENTROID_LNG_COLUMN].values[i])

        if basic_stat_names:
            this_basic_stat_dict = get_basic_statistics(
                this_polygon_object_xy, basic_stat_names)

            for this_name in basic_stat_names:
                storm_object_table[this_name].values[i] = this_basic_stat_dict[
                    this_name]

        if region_property_names:
            # Region properties are computed on a rasterized (binary) version
            # of the polygon.
            this_binary_image_matrix = _xy_polygon_to_binary_matrix(
                this_polygon_object_xy, grid_spacing_for_binary_matrix_metres)

            this_region_prop_dict = get_region_properties(
                this_binary_image_matrix, property_names=region_property_names)

            for this_name in region_property_names:
                storm_object_table[this_name].values[
                    i] = this_region_prop_dict[this_name]

        if curvature_based_stat_names:
            # Smooth the outline first; input args need checking only once.
            these_x_smoothed_metres, these_y_smoothed_metres = (
                sia.sia_for_closed_polygon(
                    this_polygon_object_xy,
                    num_vertices_in_half_window=
                    num_vertices_in_smoothing_half_window,
                    num_iterations=num_smoothing_iterations,
                    check_input_args=i == 0))

            this_polygon_object_xy_smoothed = (
                polygons.vertex_arrays_to_polygon_object(
                    these_x_smoothed_metres, these_y_smoothed_metres))

            this_curvature_based_stat_dict = get_curvature_based_stats(
                this_polygon_object_xy_smoothed,
                statistic_names=curvature_based_stat_names)

            for this_name in curvature_based_stat_names:
                storm_object_table[this_name].values[i] = (
                    this_curvature_based_stat_dict[this_name])

    # BUG FIX: the original used a Python-2 print statement here, which is a
    # SyntaxError in Python 3.
    print('Have computed shape statistics for all {0:d} storm objects!'.format(
        num_storm_objects))

    return storm_object_table[STORM_COLUMNS_TO_KEEP + statistic_names]
def _get_data_for_interp_with_split():
    """Creates synthetic data for interpolation with storm split.

    Storm A splits into storms B and C at time 20.

    :return: storm_object_table: pandas DataFrame with the following columns.
        Each row is one storm object.
    storm_object_table.primary_id_string: Primary storm ID.
    storm_object_table.secondary_id_string: Secondary storm ID.
    storm_object_table.valid_time_unix_sec: Valid time.
    storm_object_table.centroid_x_metres: x-coordinate of centroid.
    storm_object_table.centroid_y_metres: y-coordinate of centroid.
    storm_object_table.polygon_object_xy_metres: Storm outline (instance of
        `shapely.geometry.Polygon`).
    storm_object_table.first_prev_secondary_id_string: Secondary ID of first
        predecessor ("" if no predecessors).
    storm_object_table.second_prev_secondary_id_string: Secondary ID of second
        predecessor ("" if only one predecessor).
    storm_object_table.first_next_secondary_id_string: Secondary ID of first
        successor ("" if no successors).
    storm_object_table.second_next_secondary_id_string: Secondary ID of second
        successor ("" if no successors).

    :return: tornado_table: pandas DataFrame with the following columns.
    tornado_table.valid_time_unix_sec: Valid time.
    tornado_table.x_coord_metres: x-coordinate.
    tornado_table.y_coord_metres: y-coordinate.
    """

    primary_id_strings = ['foo'] * 5
    secondary_id_strings = ['A', 'A', 'A', 'B', 'C']

    valid_times_unix_sec = numpy.array([5, 10, 15, 20, 20], dtype=int)
    centroid_x_coords = numpy.array([2, 7, 12, 17, 17], dtype=float)
    centroid_y_coords = numpy.array([5, 5, 5, 8, 2], dtype=float)

    first_prev_sec_id_strings = ['', 'A', 'A', 'A', 'A']
    second_prev_sec_id_strings = ['', '', '', '', '']
    first_next_sec_id_strings = ['A', 'A', 'B', '', '']
    second_next_sec_id_strings = ['', '', 'C', '', '']

    # Outline shape by secondary ID; storms not listed here get a square.
    shape_coords_by_sec_id = {
        'B': (OCTAGON_X_COORDS, OCTAGON_Y_COORDS),
        'C': (HEXAGON_X_COORDS, HEXAGON_Y_COORDS)
    }

    polygon_objects_xy = []

    for j, this_sec_id_string in enumerate(secondary_id_strings):
        these_x_coords, these_y_coords = shape_coords_by_sec_id.get(
            this_sec_id_string, (SQUARE_X_COORDS, SQUARE_Y_COORDS))

        # Shrink the shape by half and centre it on the storm's centroid.
        polygon_objects_xy.append(polygons.vertex_arrays_to_polygon_object(
            exterior_x_coords=centroid_x_coords[j] + these_x_coords / 2,
            exterior_y_coords=centroid_y_coords[j] + these_y_coords / 2
        ))

    storm_object_table = pandas.DataFrame.from_dict({
        tracking_utils.PRIMARY_ID_COLUMN: primary_id_strings,
        tracking_utils.SECONDARY_ID_COLUMN: secondary_id_strings,
        tracking_utils.VALID_TIME_COLUMN: valid_times_unix_sec,
        tracking_utils.CENTROID_X_COLUMN: centroid_x_coords,
        tracking_utils.CENTROID_Y_COLUMN: centroid_y_coords,
        tracking_utils.FIRST_PREV_SECONDARY_ID_COLUMN:
            first_prev_sec_id_strings,
        tracking_utils.SECOND_PREV_SECONDARY_ID_COLUMN:
            second_prev_sec_id_strings,
        tracking_utils.FIRST_NEXT_SECONDARY_ID_COLUMN:
            first_next_sec_id_strings,
        tracking_utils.SECOND_NEXT_SECONDARY_ID_COLUMN:
            second_next_sec_id_strings,
        POLYGON_COLUMN: polygon_objects_xy
    })

    # One tornado report, between the last two storm times.
    tornado_table = pandas.DataFrame.from_dict({
        TORNADO_TIME_COLUMN: numpy.array([18], dtype=int),
        TORNADO_X_COLUMN: numpy.array([15.]),
        TORNADO_Y_COLUMN: numpy.array([3.2])
    })

    return storm_object_table, tornado_table
# Example #19
# 0
def _plot_background_of_attributes_diagram(axes_object, climatology):
    """Plots background (reference lines and polygons) of attributes diagram.

    For more on the attributes diagram, see Hsu and Murphy (1986).

    BSS = Brier skill score.  For more on the BSS, see
    `model_evaluation.get_brier_skill_score`.

    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param climatology: Event frequency for the entire dataset.
    """

    # Climatology is a frequency, so it must be a valid probability.
    error_checking.assert_is_geq(climatology, 0.)
    error_checking.assert_is_leq(climatology, 1.)

    (left_x_coords, left_y_coords, right_x_coords, right_y_coords
     ) = model_eval.get_skill_areas_in_reliability_curve(climatology)

    skill_area_colour = matplotlib.colors.to_rgba(
        plotting_utils.colour_from_numpy_to_tuple(ZERO_BSS_COLOUR),
        POSITIVE_BSS_OPACITY)

    # Shade the two positive-skill regions (left of climo and right of climo)
    # with the same translucent colour.
    for these_x_coords, these_y_coords in [
            (left_x_coords, left_y_coords),
            (right_x_coords, right_y_coords)
    ]:
        this_polygon_object = polygons.vertex_arrays_to_polygon_object(
            these_x_coords, these_y_coords)
        this_patch_object = PolygonPatch(
            this_polygon_object, lw=0, ec=skill_area_colour,
            fc=skill_area_colour)

        axes_object.add_patch(this_patch_object)

    # Solid line where BSS = 0 (the no-skill reliability curve).
    no_skill_x_coords, no_skill_y_coords = (
        model_eval.get_no_skill_reliability_curve(climatology))

    axes_object.plot(
        no_skill_x_coords, no_skill_y_coords,
        color=plotting_utils.colour_from_numpy_to_tuple(ZERO_BSS_COLOUR),
        linestyle='solid', linewidth=ZERO_BSS_LINE_WIDTH)

    # The climatology line and the no-resolution line share colour and style.
    climo_x_coords, climo_y_coords = (
        model_eval.get_climatology_line_for_reliability_curve(climatology))
    no_resolution_x_coords, no_resolution_y_coords = (
        model_eval.get_no_resolution_line_for_reliability_curve(climatology))

    for these_x_coords, these_y_coords in [
            (climo_x_coords, climo_y_coords),
            (no_resolution_x_coords, no_resolution_y_coords)
    ]:
        axes_object.plot(
            these_x_coords, these_y_coords,
            color=plotting_utils.colour_from_numpy_to_tuple(CLIMO_COLOUR),
            linestyle='dashed', linewidth=CLIMO_LINE_WIDTH)
def _get_data_for_interp_with_merger():
    """Creates synthetic data for interpolation with storm merger.

    Storms A and B merge into storm C at time 15.

    :return: storm_object_table: See doc for `_get_data_for_interp_with_split`.
    :return: tornado_table: Same.
    """

    primary_id_strings = ['foo'] * 6
    secondary_id_strings = ['A', 'B', 'A', 'B', 'C', 'C']

    valid_times_unix_sec = numpy.array([5, 5, 10, 10, 15, 20], dtype=int)
    centroid_x_coords = numpy.array([2, 2, 7, 7, 12, 17], dtype=float)
    centroid_y_coords = numpy.array([8, 2, 8, 2, 5, 5], dtype=float)

    first_prev_sec_id_strings = ['', '', 'A', 'B', 'A', 'C']
    second_prev_sec_id_strings = ['', '', '', '', 'B', '']
    first_next_sec_id_strings = ['A', 'B', 'C', 'C', 'C', '']
    second_next_sec_id_strings = ['', '', '', '', '', '']

    # Outline shape by secondary ID; storms not listed here get a square.
    shape_coords_by_sec_id = {
        'A': (OCTAGON_X_COORDS, OCTAGON_Y_COORDS),
        'B': (HEXAGON_X_COORDS, HEXAGON_Y_COORDS)
    }

    polygon_objects_xy = []

    for j, this_sec_id_string in enumerate(secondary_id_strings):
        these_x_coords, these_y_coords = shape_coords_by_sec_id.get(
            this_sec_id_string, (SQUARE_X_COORDS, SQUARE_Y_COORDS))

        # Shrink the shape by half and centre it on the storm's centroid.
        polygon_objects_xy.append(polygons.vertex_arrays_to_polygon_object(
            exterior_x_coords=centroid_x_coords[j] + these_x_coords / 2,
            exterior_y_coords=centroid_y_coords[j] + these_y_coords / 2
        ))

    storm_object_table = pandas.DataFrame.from_dict({
        tracking_utils.PRIMARY_ID_COLUMN: primary_id_strings,
        tracking_utils.SECONDARY_ID_COLUMN: secondary_id_strings,
        tracking_utils.VALID_TIME_COLUMN: valid_times_unix_sec,
        tracking_utils.CENTROID_X_COLUMN: centroid_x_coords,
        tracking_utils.CENTROID_Y_COLUMN: centroid_y_coords,
        tracking_utils.FIRST_PREV_SECONDARY_ID_COLUMN:
            first_prev_sec_id_strings,
        tracking_utils.SECOND_PREV_SECONDARY_ID_COLUMN:
            second_prev_sec_id_strings,
        tracking_utils.FIRST_NEXT_SECONDARY_ID_COLUMN:
            first_next_sec_id_strings,
        tracking_utils.SECOND_NEXT_SECONDARY_ID_COLUMN:
            second_next_sec_id_strings,
        POLYGON_COLUMN: polygon_objects_xy
    })

    # One tornado report, between storm times 10 and 15.
    tornado_table = pandas.DataFrame.from_dict({
        TORNADO_TIME_COLUMN: numpy.array([12], dtype=int),
        TORNADO_X_COLUMN: numpy.array([9.]),
        TORNADO_Y_COLUMN: numpy.array([3.2])
    })

    return storm_object_table, tornado_table
def _plot_interp_two_times(storm_object_table, tornado_table, legend_font_size,
                           legend_position_string):
    """Plots interpolation for one pair of times.

    Handles both a merger (two storm objects before the tornado time, one
    after) and a split (one before, two after) — see the branching on
    `len(next_object_indices)` below.

    :param storm_object_table: See doc for `_get_interp_data_for_split`.
    :param tornado_table: Same.
    :param legend_font_size: Font size in legend.
    :param legend_position_string: Legend position.
    :return: figure_object: Figure handle (instance of
        `matplotlib.figure.Figure`).
    :return: axes_object: Axes handle (instance of
        `matplotlib.axes._subplots.AxesSubplot`).
    """

    centroid_x_coords = (
        storm_object_table[tracking_utils.CENTROID_X_COLUMN].values
    )
    centroid_y_coords = (
        storm_object_table[tracking_utils.CENTROID_Y_COLUMN].values
    )
    storm_times_minutes = (
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values
    ).astype(float)
    secondary_id_strings = (
        storm_object_table[tracking_utils.SECONDARY_ID_COLUMN].values
    )

    # The track-plotting code expects lat/long columns; reuse the synthetic
    # x/y coordinates directly as "degrees".
    storm_object_table = storm_object_table.assign(**{
        tracking_utils.CENTROID_LONGITUDE_COLUMN: centroid_x_coords,
        tracking_utils.CENTROID_LATITUDE_COLUMN: centroid_y_coords
    })

    figure_object, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=numpy.min(centroid_y_coords),
            max_latitude_deg=numpy.max(centroid_y_coords),
            min_longitude_deg=numpy.min(centroid_x_coords),
            max_longitude_deg=numpy.max(centroid_x_coords)
        )
    )

    storm_plotting.plot_storm_tracks(
        storm_object_table=storm_object_table, axes_object=axes_object,
        basemap_object=basemap_object, colour_map_object=None,
        constant_colour=TRACK_COLOUR, line_width=TRACK_WIDTH,
        start_marker_type=None, end_marker_type=None)

    num_storm_objects = len(storm_object_table.index)
    legend_handles = []
    legend_strings = []

    # Draw each actual storm outline as a translucent patch.
    for i in range(num_storm_objects):
        this_patch_object = PolygonPatch(
            storm_object_table[POLYGON_COLUMN].values[i],
            lw=0, ec=NON_INTERP_COLOUR, fc=NON_INTERP_COLOUR,
            alpha=POLYGON_OPACITY)

        axes_object.add_patch(this_patch_object)

    # One marker per storm centroid; a single legend entry covers them all.
    this_handle = axes_object.plot(
        storm_object_table[tracking_utils.CENTROID_X_COLUMN].values,
        storm_object_table[tracking_utils.CENTROID_Y_COLUMN].values,
        linestyle='None', marker=DEFAULT_MARKER_TYPE,
        markersize=DEFAULT_MARKER_SIZE, markerfacecolor=NON_INTERP_COLOUR,
        markeredgecolor=NON_INTERP_COLOUR,
        markeredgewidth=DEFAULT_MARKER_EDGE_WIDTH
    )[0]

    legend_handles.append(this_handle)
    legend_strings.append('Actual storm')

    # Label each storm object with its secondary ID, just below the centroid.
    for i in range(num_storm_objects):
        axes_object.text(
            centroid_x_coords[i], centroid_y_coords[i] - TEXT_OFFSET,
            secondary_id_strings[i], color=TRACK_COLOUR,
            fontsize=DEFAULT_FONT_SIZE, fontweight='bold',
            horizontalalignment='center', verticalalignment='top')

    # Find the storm times immediately before and after the tornado.
    tornado_time_minutes = tornado_table[TORNADO_TIME_COLUMN].values[0]
    previous_time_minutes = numpy.max(
        storm_times_minutes[storm_times_minutes < tornado_time_minutes]
    )
    next_time_minutes = numpy.min(
        storm_times_minutes[storm_times_minutes > tornado_time_minutes]
    )

    previous_object_indices = numpy.where(
        storm_times_minutes == previous_time_minutes
    )[0]
    next_object_indices = numpy.where(
        storm_times_minutes == next_time_minutes
    )[0]

    # Mean centroid at each of the two bracketing times.  With two objects at
    # one time, this is the midpoint of their centroids.
    previous_x_coord = numpy.mean(centroid_x_coords[previous_object_indices])
    previous_y_coord = numpy.mean(centroid_y_coords[previous_object_indices])
    next_x_coord = numpy.mean(centroid_x_coords[next_object_indices])
    next_y_coord = numpy.mean(centroid_y_coords[next_object_indices])

    if len(next_object_indices) == 1:
        # Merger: two previous objects, one next object.  NOTE(review): the
        # label indexes previous_object_indices[1], so this branch assumes
        # exactly two previous objects — confirm callers guarantee this.
        midpoint_x_coord = previous_x_coord
        midpoint_y_coord = previous_y_coord
        midpoint_label_string = 'Midpoint of {0:s} and {1:s}'.format(
            secondary_id_strings[previous_object_indices[0]],
            secondary_id_strings[previous_object_indices[1]]
        )

        line_x_coords = numpy.array([midpoint_x_coord, next_x_coord])
        line_y_coords = numpy.array([midpoint_y_coord, next_y_coord])
    else:
        # Split: one previous object, two next objects.
        midpoint_x_coord = next_x_coord
        midpoint_y_coord = next_y_coord
        midpoint_label_string = 'Midpoint of {0:s} and {1:s}'.format(
            secondary_id_strings[next_object_indices[0]],
            secondary_id_strings[next_object_indices[1]]
        )

        line_x_coords = numpy.array([previous_x_coord, midpoint_x_coord])
        line_y_coords = numpy.array([previous_y_coord, midpoint_y_coord])

    this_handle = axes_object.plot(
        midpoint_x_coord, midpoint_y_coord, linestyle='None',
        marker=DEFAULT_MARKER_TYPE, markersize=DEFAULT_MARKER_SIZE,
        markerfacecolor=MIDPOINT_COLOUR, markeredgecolor=MIDPOINT_COLOUR,
        markeredgewidth=DEFAULT_MARKER_EDGE_WIDTH
    )[0]

    legend_handles.append(this_handle)
    legend_strings.append(midpoint_label_string)

    # Linear time interpolation of the mean centroid to the tornado time.
    this_ratio = (
        (tornado_time_minutes - previous_time_minutes) /
        (next_time_minutes - previous_time_minutes)
    )
    interp_x_coord = previous_x_coord + (
        this_ratio * (next_x_coord - previous_x_coord)
    )
    interp_y_coord = previous_y_coord + (
        this_ratio * (next_y_coord - previous_y_coord)
    )

    # Borrow the outline of the single-object side (the next object for a
    # merger, the previous object for a split) and translate it to the
    # interpolated centroid.
    if len(next_object_indices) == 1:
        x_offset = interp_x_coord - next_x_coord
        y_offset = interp_y_coord - next_y_coord
        interp_polygon_object_xy = storm_object_table[POLYGON_COLUMN].values[
            next_object_indices[0]
        ]
    else:
        x_offset = interp_x_coord - previous_x_coord
        y_offset = interp_y_coord - previous_y_coord
        interp_polygon_object_xy = storm_object_table[POLYGON_COLUMN].values[
            previous_object_indices[0]
        ]

    interp_polygon_object_xy = polygons.vertex_arrays_to_polygon_object(
        exterior_x_coords=(
            x_offset + numpy.array(interp_polygon_object_xy.exterior.xy[0])
        ),
        exterior_y_coords=(
            y_offset + numpy.array(interp_polygon_object_xy.exterior.xy[1])
        )
    )

    this_patch_object = PolygonPatch(
        interp_polygon_object_xy, lw=0, ec=INTERP_COLOUR, fc=INTERP_COLOUR,
        alpha=POLYGON_OPACITY)
    axes_object.add_patch(this_patch_object)

    this_handle = axes_object.plot(
        interp_x_coord, interp_y_coord, linestyle='None',
        marker=DEFAULT_MARKER_TYPE, markersize=DEFAULT_MARKER_SIZE,
        markerfacecolor=INTERP_COLOUR, markeredgecolor=INTERP_COLOUR,
        markeredgewidth=DEFAULT_MARKER_EDGE_WIDTH
    )[0]

    legend_handles.append(this_handle)
    legend_strings.append('Interpolated storm')

    this_handle = axes_object.plot(
        line_x_coords, line_y_coords,
        linestyle='dashed', color=MIDPOINT_COLOUR, linewidth=4
    )[0]

    # insert(-1, ...) puts the interpolation line just before the last entry
    # ("Interpolated storm") in the legend.
    legend_handles.insert(-1, this_handle)
    legend_strings.insert(-1, 'Interpolation line')

    this_handle = axes_object.plot(
        tornado_table[TORNADO_X_COLUMN].values[0],
        tornado_table[TORNADO_Y_COLUMN].values[0], linestyle='None',
        marker=TORNADO_MARKER_TYPE, markersize=TORNADO_MARKER_SIZE,
        markerfacecolor=INTERP_COLOUR, markeredgecolor=INTERP_COLOUR,
        markeredgewidth=TORNADO_MARKER_EDGE_WIDTH
    )[0]

    # Tornado goes second in the legend, right after "Actual storm".
    legend_handles.insert(1, this_handle)
    this_string = 'Tornado (at {0:d} min)'.format(
        int(numpy.round(tornado_time_minutes))
    )
    legend_strings.insert(1, this_string)

    # In the synthetic data each distinct x-coordinate corresponds to one
    # storm time, so the x-axis can be labeled with times instead of
    # coordinates (presumably guaranteed by the data generators — verify).
    x_tick_values, unique_indices = numpy.unique(
        centroid_x_coords, return_index=True)
    x_tick_labels = [
        '{0:d}'.format(int(numpy.round(storm_times_minutes[i])))
        for i in unique_indices
    ]

    axes_object.set_xticks(x_tick_values)
    axes_object.set_xticklabels(x_tick_labels)
    axes_object.set_xlabel('Storm time (minutes)')

    # Hide the y-axis; only time (x) is meaningful here.
    axes_object.set_yticks([], [])
    axes_object.legend(
        legend_handles, legend_strings, fontsize=legend_font_size,
        loc=legend_position_string)

    return figure_object, axes_object
# Example #22
# 0
from gewittergefahr.gg_utils import polygons
from gewittergefahr.gg_utils import human_polygons

# Absolute tolerance for floating-point comparisons in the tests below.
TOLERANCE = 1e-6
TOLERANCE_NUM_DECIMAL_PLACES = 6

# The following constants are used to test _vertex_list_to_polygon_list and
# _polygon_list_to_vertex_list.
# NaN entries act as separators between the three polygons in the flattened
# vertex arrays.
TOY_VERTEX_ROWS = numpy.array([
    0, 5, 5, 0, 0, numpy.nan, 0, 0, 10, 10, 0, numpy.nan, -10, 20, 20, -10, -10
])
TOY_VERTEX_COLUMNS = numpy.array(
    [0, 0, 10, 10, 0, numpy.nan, 0, 5, 5, 0, 0, numpy.nan, 40, 40, 60, 60, 40])

# First polygon: vertices before the first NaN separator.
THIS_FIRST_POLYGON_OBJECT = polygons.vertex_arrays_to_polygon_object(
    exterior_x_coords=TOY_VERTEX_COLUMNS[:5],
    exterior_y_coords=TOY_VERTEX_ROWS[:5])

# Second polygon: vertices between the two NaN separators.
THIS_SECOND_POLYGON_OBJECT = polygons.vertex_arrays_to_polygon_object(
    exterior_x_coords=TOY_VERTEX_COLUMNS[6:11],
    exterior_y_coords=TOY_VERTEX_ROWS[6:11])

# Third polygon: vertices after the second NaN separator.
THIS_THIRD_POLYGON_OBJECT = polygons.vertex_arrays_to_polygon_object(
    exterior_x_coords=TOY_VERTEX_COLUMNS[12:],
    exterior_y_coords=TOY_VERTEX_ROWS[12:])

TOY_POLYGON_OBJECTS = [
    THIS_FIRST_POLYGON_OBJECT, THIS_SECOND_POLYGON_OBJECT,
    THIS_THIRD_POLYGON_OBJECT
]

# Table linking each storm object to wind observations and labels; the
# constituent arrays (STORM_IDS, STORM_OBJECT_TIMES_UNIX_SEC, etc.) are
# defined earlier in this file.
STORM_TO_WINDS_DICT = {
    tracking_io.STORM_ID_COLUMN: STORM_IDS,
    tracking_io.TIME_COLUMN: STORM_OBJECT_TIMES_UNIX_SEC,
    storms_to_winds.END_TIME_COLUMN: STORM_END_TIMES_UNIX_SEC,
    labels.NUM_OBSERVATIONS_FOR_LABEL_COLUMN: NUM_OBSERVATIONS_BY_STORM_OBJECT,
    REGRESSION_LABEL_COLUMN_NAME: REGRESSION_LABELS_M_S01,
    CLASSIFICATION_LABEL_COLUMN_NAME: CLASSIFICATION_LABELS
}
STORM_TO_WINDS_TABLE = pandas.DataFrame.from_dict(STORM_TO_WINDS_DICT)

# Add polygons (storm outlines) to table.  Every storm object gets the same
# outline, which is fine for these tests.
VERTEX_LATITUDES_DEG = numpy.array([53.4, 53.4, 53.6, 53.6, 53.5, 53.5, 53.4])
VERTEX_LONGITUDES_DEG = numpy.array(
    [246.4, 246.6, 246.6, 246.5, 246.5, 246.4, 246.4])

POLYGON_OBJECT_LATLNG = polygons.vertex_arrays_to_polygon_object(
    VERTEX_LONGITUDES_DEG, VERTEX_LATITUDES_DEG)
POLYGON_OBJECT_ARRAY_LATLNG = numpy.full(NUM_STORM_OBJECTS,
                                         POLYGON_OBJECT_LATLNG,
                                         dtype=object)

THIS_ARGUMENT_DICT = {
    tracking_io.POLYGON_OBJECT_LATLNG_COLUMN: POLYGON_OBJECT_ARRAY_LATLNG
}
STORM_TO_WINDS_TABLE = STORM_TO_WINDS_TABLE.assign(**THIS_ARGUMENT_DICT)

# The following constants are used to test _select_storms_uniformly_by_category
# and sample_by_uniform_wind_speed.
CUTOFFS_FOR_UNIFORM_SAMPLING_M_S01 = (KT_TO_METRES_PER_SECOND *
                                      numpy.array([10., 20., 30., 40., 50.]))
CATEGORIES_FOR_UNIFORM_SAMPLING = numpy.array([
    -1, -1, -1, -1, -1, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 4, 4, 4,
from gewittergefahr.gg_utils import polygons
from gewittergefahr.gg_utils import shape_utils

# Absolute tolerance for floating-point comparisons in the tests below.
TOLERANCE = 1e-6
NUM_VERTICES_IN_SMOOTHING_HALF_WINDOW = 1

# Open polyline used to test SIA smoothing of polylines.
POLYLINE_X_COORDS = numpy.array(
    [3., 3., 0., 0., 3., 3., 5., 5., 8., 8., 5., 5.])
POLYLINE_Y_COORDS = numpy.array(
    [6., 3., 3., 1., 1., 0., 0., 1., 1., 3., 3., 6.])

# Closed polygon formed by repeating the first polyline vertex at the end.
POLYGON_X_COORDS = numpy.concatenate(
    (POLYLINE_X_COORDS, numpy.array([POLYLINE_X_COORDS[0]])))
POLYGON_Y_COORDS = numpy.concatenate(
    (POLYLINE_Y_COORDS, numpy.array([POLYLINE_Y_COORDS[0]])))
POLYGON_OBJECT = polygons.vertex_arrays_to_polygon_object(
    POLYGON_X_COORDS, POLYGON_Y_COORDS)

# Expected vertex coordinates after smoothing the polyline (endpoints are
# kept fixed; interior vertices are averaged over the smoothing window).
POLYLINE_X_COORDS_SMOOTHED = numpy.array(
    [3., 2., 1., 1., 2., 3.666667, 4.333333, 6., 7., 7., 6., 5.])
POLYLINE_Y_COORDS_SMOOTHED = numpy.array([
    6., 4., 2.333333, 1.666667, 0.666667, 0.333333, 0.333333, 0.666667,
    1.666667, 2.333333, 4., 6.
])

# Expected vertex coordinates after smoothing the closed polygon (unlike the
# polyline case, the first/last vertex is also smoothed, via wraparound).
POLYGON_X_COORDS_SMOOTHED = numpy.array(
    [3.666667, 2., 1., 1., 2., 3.666667, 4.333333, 6., 7., 7., 6., 4.333333])
POLYGON_Y_COORDS_SMOOTHED = numpy.array([
    5., 4., 2.333333, 1.666667, 0.666667, 0.333333, 0.333333, 0.666667,
    1.666667, 2.333333, 4., 5.
])
def read_raw_file(raw_file_name):
    """Reads tracking data from raw (either JSON or ASCII) file.

    This file should contain all storm objects at one time step.

    :param raw_file_name: Path to input file.
    :return: storm_object_table: pandas DataFrame with columns documented in
        `storm_tracking_io.write_processed_file`.
    """

    error_checking.assert_file_exists(raw_file_name)
    _, pathless_file_name = os.path.split(raw_file_name)
    _, file_extension = os.path.splitext(pathless_file_name)
    _check_raw_file_extension(file_extension)

    # Valid time is encoded in the file name itself.
    unix_time_sec = raw_file_name_to_time(raw_file_name)

    if file_extension == ASCII_FILE_EXTENSION:
        storm_ids = []
        east_velocities_m_s01 = []
        north_velocities_m_s01 = []
        list_of_latitude_vertex_arrays_deg = []
        list_of_longitude_vertex_arrays_deg = []

        # Use a context manager so the handle is always closed (the previous
        # `open(...).readlines()` leaked the file handle).
        with open(raw_file_name, 'r') as ascii_file_handle:
            for this_line in ascii_file_handle:
                these_words = this_line.split(':')

                # Lines with too few fields are headers/blank lines.
                if len(these_words) < MIN_WORDS_PER_ASCII_LINE:
                    continue

                storm_ids.append(these_words[STORM_ID_INDEX_IN_ASCII_FILES])
                east_velocities_m_s01.append(
                    float(these_words[U_MOTION_INDEX_IN_ASCII_FILES]))

                # Raw v-motion uses the opposite sign convention, hence -1.
                north_velocities_m_s01.append(
                    -1 * float(these_words[V_MOTION_INDEX_IN_ASCII_FILES]))

                # Polygon field alternates latitude and longitude words.
                these_polygon_words = numpy.array(
                    these_words[POLYGON_INDEX_IN_ASCII_FILES].split(','))
                these_latitude_words = these_polygon_words[
                    LATITUDE_INDEX_IN_ASCII_FILES::2].tolist()
                these_longitude_words = these_polygon_words[
                    LONGITUDE_INDEX_IN_ASCII_FILES::2].tolist()

                these_latitudes_deg = numpy.array(
                    [float(w) for w in these_latitude_words])
                these_longitudes_deg = numpy.array(
                    [float(w) for w in these_longitude_words])
                list_of_latitude_vertex_arrays_deg.append(these_latitudes_deg)
                list_of_longitude_vertex_arrays_deg.append(
                    these_longitudes_deg)

        east_velocities_m_s01 = numpy.array(east_velocities_m_s01)
        north_velocities_m_s01 = numpy.array(north_velocities_m_s01)
        num_storms = len(storm_ids)

    else:
        with open(raw_file_name) as json_file_handle:
            probsevere_dict = json.load(json_file_handle)

        num_storms = len(probsevere_dict[FEATURES_KEY_IN_JSON_FILES])
        storm_ids = [None] * num_storms
        east_velocities_m_s01 = numpy.full(num_storms, numpy.nan)
        north_velocities_m_s01 = numpy.full(num_storms, numpy.nan)
        list_of_latitude_vertex_arrays_deg = [None] * num_storms
        list_of_longitude_vertex_arrays_deg = [None] * num_storms

        for i in range(num_storms):
            storm_ids[i] = str(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [PROPERTIES_KEY_IN_JSON_FILES][STORM_ID_KEY_IN_JSON_FILES])
            east_velocities_m_s01[i] = float(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [PROPERTIES_KEY_IN_JSON_FILES][U_MOTION_KEY_IN_JSON_FILES])

            # Raw v-motion uses the opposite sign convention, hence -1.
            north_velocities_m_s01[i] = -1 * float(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [PROPERTIES_KEY_IN_JSON_FILES][V_MOTION_KEY_IN_JSON_FILES])

            this_vertex_matrix_deg = numpy.array(
                probsevere_dict[FEATURES_KEY_IN_JSON_FILES][i]
                [GEOMETRY_KEY_IN_JSON_FILES][COORDINATES_KEY_IN_JSON_FILES][0])
            list_of_latitude_vertex_arrays_deg[i] = numpy.array(
                this_vertex_matrix_deg[:, LATITUDE_INDEX_IN_JSON_FILES])
            list_of_longitude_vertex_arrays_deg[i] = numpy.array(
                this_vertex_matrix_deg[:, LONGITUDE_INDEX_IN_JSON_FILES])

    spc_date_unix_sec = time_conversion.time_to_spc_date_unix_sec(
        unix_time_sec)
    unix_times_sec = numpy.full(num_storms, unix_time_sec, dtype=int)
    spc_dates_unix_sec = numpy.full(num_storms, spc_date_unix_sec, dtype=int)
    tracking_start_times_unix_sec = numpy.full(
        num_storms, DUMMY_TRACKING_START_TIME_UNIX_SEC, dtype=int)
    tracking_end_times_unix_sec = numpy.full(num_storms,
                                             DUMMY_TRACKING_END_TIME_UNIX_SEC,
                                             dtype=int)

    storm_object_dict = {
        tracking_utils.STORM_ID_COLUMN: storm_ids,
        tracking_utils.EAST_VELOCITY_COLUMN: east_velocities_m_s01,
        tracking_utils.NORTH_VELOCITY_COLUMN: north_velocities_m_s01,
        tracking_utils.TIME_COLUMN: unix_times_sec,
        tracking_utils.SPC_DATE_COLUMN: spc_dates_unix_sec,
        tracking_utils.TRACKING_START_TIME_COLUMN:
        tracking_start_times_unix_sec,
        tracking_utils.TRACKING_END_TIME_COLUMN: tracking_end_times_unix_sec
    }
    storm_object_table = pandas.DataFrame.from_dict(storm_object_dict)

    # Placeholder columns, filled in per storm in the loop below.
    # NOTE(review): `simple_array`/`object_array`/`nested_array` are each
    # shared by several columns; this presumably relies on pandas copying the
    # data during `assign` so the columns do not alias -- confirm.
    storm_ages_sec = numpy.full(num_storms, numpy.nan)
    simple_array = numpy.full(num_storms, numpy.nan)
    object_array = numpy.full(num_storms, numpy.nan, dtype=object)
    nested_array = storm_object_table[[
        tracking_utils.STORM_ID_COLUMN, tracking_utils.STORM_ID_COLUMN
    ]].values.tolist()

    argument_dict = {
        tracking_utils.AGE_COLUMN: storm_ages_sec,
        tracking_utils.CENTROID_LAT_COLUMN: simple_array,
        tracking_utils.CENTROID_LNG_COLUMN: simple_array,
        tracking_utils.GRID_POINT_LAT_COLUMN: nested_array,
        tracking_utils.GRID_POINT_LNG_COLUMN: nested_array,
        tracking_utils.GRID_POINT_ROW_COLUMN: nested_array,
        tracking_utils.GRID_POINT_COLUMN_COLUMN: nested_array,
        tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN: object_array,
        tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN: object_array
    }
    storm_object_table = storm_object_table.assign(**argument_dict)

    for i in range(num_storms):
        # Convert outline from lat-long to radar grid (row-column) space.
        these_vertex_rows, these_vertex_columns = (
            radar_utils.latlng_to_rowcol(
                latitudes_deg=list_of_latitude_vertex_arrays_deg[i],
                longitudes_deg=list_of_longitude_vertex_arrays_deg[i],
                nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
                nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
                lat_spacing_deg=GRID_LAT_SPACING_DEG,
                lng_spacing_deg=GRID_LNG_SPACING_DEG))

        # Clean up probSevere outlines so vertices lie on grid-cell edges.
        these_vertex_rows, these_vertex_columns = (
            polygons.fix_probsevere_vertices(
                row_indices_orig=these_vertex_rows,
                column_indices_orig=these_vertex_columns))

        these_vertex_latitudes_deg, these_vertex_longitudes_deg = (
            radar_utils.rowcol_to_latlng(
                grid_rows=these_vertex_rows,
                grid_columns=these_vertex_columns,
                nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
                nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
                lat_spacing_deg=GRID_LAT_SPACING_DEG,
                lng_spacing_deg=GRID_LNG_SPACING_DEG))

        # All grid points inside the storm outline.
        (storm_object_table[tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
         storm_object_table[tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i]
         ) = polygons.simple_polygon_to_grid_points(
             vertex_row_indices=these_vertex_rows,
             vertex_column_indices=these_vertex_columns)

        (storm_object_table[tracking_utils.GRID_POINT_LAT_COLUMN].values[i],
         storm_object_table[tracking_utils.GRID_POINT_LNG_COLUMN].values[i]
         ) = radar_utils.rowcol_to_latlng(
             grid_rows=storm_object_table[
                 tracking_utils.GRID_POINT_ROW_COLUMN].values[i],
             grid_columns=storm_object_table[
                 tracking_utils.GRID_POINT_COLUMN_COLUMN].values[i],
             nw_grid_point_lat_deg=NW_GRID_POINT_LAT_DEG,
             nw_grid_point_lng_deg=NW_GRID_POINT_LNG_DEG,
             lat_spacing_deg=GRID_LAT_SPACING_DEG,
             lng_spacing_deg=GRID_LNG_SPACING_DEG)

        (storm_object_table[tracking_utils.CENTROID_LAT_COLUMN].values[i],
         storm_object_table[tracking_utils.CENTROID_LNG_COLUMN].values[i]
         ) = geodetic_utils.get_latlng_centroid(
             latitudes_deg=these_vertex_latitudes_deg,
             longitudes_deg=these_vertex_longitudes_deg)

        storm_object_table[tracking_utils.POLYGON_OBJECT_ROWCOL_COLUMN].values[
            i] = polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_columns,
                exterior_y_coords=these_vertex_rows)

        storm_object_table[tracking_utils.POLYGON_OBJECT_LATLNG_COLUMN].values[
            i] = polygons.vertex_arrays_to_polygon_object(
                exterior_x_coords=these_vertex_longitudes_deg,
                exterior_y_coords=these_vertex_latitudes_deg)

    return storm_object_table
# Example #26
# 0
def _plot_background_of_attributes_diagram(
        axes_object,
        climatology,
        no_skill_line_colour=DEFAULT_ZERO_BSS_COLOUR,
        no_skill_line_width=DEFAULT_ZERO_BSS_WIDTH,
        other_line_colour=DEFAULT_CLIMATOLOGY_COLOUR,
        other_line_width=DEFAULT_CLIMATOLOGY_WIDTH):
    """Plots background (reference lines and polygons) of attributes diagram.

    For more on the attributes diagram, see Hsu and Murphy (1986).

    BSS = Brier skill score.  For more on the BSS, see
    `model_evaluation.get_brier_skill_score`.

    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param climatology: Event frequency for the entire dataset.
    :param no_skill_line_colour: Colour (in any format accepted by
        `matplotlib.colors`) of no-skill line, where BSS = 0.
    :param no_skill_line_width: Width (real positive number) of no-skill line.
    :param other_line_colour: Colour of climatology and no-resolution lines.
    :param other_line_width: Width of climatology and no-resolution lines.
    """

    error_checking.assert_is_geq(climatology, 0.)
    error_checking.assert_is_leq(climatology, 1.)

    # Positive-skill (BSS > 0) regions, one on either side of climatology.
    (left_x_vertices, left_y_vertices, right_x_vertices, right_y_vertices
    ) = model_eval.get_skill_areas_in_reliability_curve(climatology)

    fill_colour = matplotlib.colors.to_rgba(
        plotting_utils.colour_from_numpy_to_tuple(no_skill_line_colour),
        TRANSPARENCY_FOR_POSITIVE_BSS_AREA)

    skill_area_coords = [
        (left_x_vertices, left_y_vertices),
        (right_x_vertices, right_y_vertices)
    ]

    # Shade both positive-skill regions with the same translucent colour.
    for these_x_vertices, these_y_vertices in skill_area_coords:
        this_polygon_object = polygons.vertex_arrays_to_polygon_object(
            these_x_vertices, these_y_vertices)
        this_patch_object = PolygonPatch(
            this_polygon_object, lw=0, ec=fill_colour, fc=fill_colour)
        axes_object.add_patch(this_patch_object)

    # Reference lines: no-skill (BSS = 0), climatology, and no-resolution.
    line_specifications = [
        (model_eval.get_no_skill_reliability_curve(climatology),
         no_skill_line_colour, 'solid', no_skill_line_width),
        (model_eval.get_climatology_line_for_reliability_curve(climatology),
         other_line_colour, 'dashed', other_line_width),
        (model_eval.get_no_resolution_line_for_reliability_curve(climatology),
         other_line_colour, 'dashed', other_line_width)
    ]

    for this_spec in line_specifications:
        (these_x_coords, these_y_coords) = this_spec[0]
        this_colour, this_line_style, this_line_width = this_spec[1:]

        axes_object.plot(
            these_x_coords,
            these_y_coords,
            color=plotting_utils.colour_from_numpy_to_tuple(this_colour),
            linestyle=this_line_style,
            linewidth=this_line_width)
# Example #27
# 0
def _run(input_shapefile_name, first_time_string, last_time_string,
         output_pickle_file_name):
    """Converts tornado-warning polygons to nicer file format.

    This is effectively the main method.

    :param input_shapefile_name: See documentation at top of file.
    :param first_time_string: Same.
    :param last_time_string: Same.
    :param output_pickle_file_name: Same.
    """

    first_time_unix_sec = time_conversion.string_to_unix_sec(
        first_time_string, INPUT_TIME_FORMAT)
    last_time_unix_sec = time_conversion.string_to_unix_sec(
        last_time_string, INPUT_TIME_FORMAT)

    print('Reading data from: "{0:s}"...'.format(input_shapefile_name))
    shapefile_handle = shapefile.Reader(input_shapefile_name)

    list_of_polygon_objects_latlng = []
    start_times_unix_sec = []
    end_times_unix_sec = []

    for this_record_object in shapefile_handle.iterShapeRecords():
        # Keep only tornado warnings (not other event types, not counties).
        if this_record_object.record[EVENT_TYPE_INDEX] != TORNADO_TYPE_STRING:
            continue

        if this_record_object.record[
                COUNTY_OR_WARNING_INDEX] != WARNING_TYPE_STRING:
            continue

        # Keep only warnings whose active period overlaps
        # [first_time_unix_sec, last_time_unix_sec].
        this_start_time_unix_sec = time_conversion.string_to_unix_sec(
            this_record_object.record[TIME_ISSUED_INDEX], SHAPEFILE_TIME_FORMAT
        )
        if this_start_time_unix_sec > last_time_unix_sec:
            continue

        this_end_time_unix_sec = time_conversion.string_to_unix_sec(
            this_record_object.record[TIME_EXPIRED_INDEX], SHAPEFILE_TIME_FORMAT
        )
        if this_end_time_unix_sec < first_time_unix_sec:
            continue

        print(this_record_object.record)

        this_area_dict = this_record_object.shape.__geo_interface__

        # Skip (with a warning) geometries of unexpected type.
        if this_area_dict[AREA_TYPE_KEY] not in VALID_AREA_TYPE_STRINGS:
            warning_string = (
                '\n{0:s}\nValid area types (listed above) do not include '
                '"{1:s}".'
            ).format(
                str(VALID_AREA_TYPE_STRINGS), this_area_dict[AREA_TYPE_KEY]
            )

            warnings.warn(warning_string)
            continue

        # Normalize geometry to a list of vertex tuples, covering both the
        # single-polygon and multi-polygon cases.
        this_coords_object = this_area_dict[COORDINATES_KEY]
        if isinstance(this_coords_object, tuple):
            these_latlng_tuples = [this_coords_object[0]]
        else:
            these_latlng_tuples = this_coords_object

        for this_latlng_tuple in these_latlng_tuples:
            # Multi-polygons nest one level deeper; take exterior ring only.
            if isinstance(this_latlng_tuple, list):
                this_latlng_tuple = this_latlng_tuple[0]

            # Each vertex is (longitude, latitude).
            this_num_vertices = len(this_latlng_tuple)
            these_latitudes_deg = numpy.array(
                [this_latlng_tuple[k][1] for k in range(this_num_vertices)]
            )
            these_longitudes_deg = numpy.array(
                [this_latlng_tuple[k][0] for k in range(this_num_vertices)]
            )

            these_longitudes_deg = lng_conversion.convert_lng_positive_in_west(
                longitudes_deg=these_longitudes_deg, allow_nan=False)

            this_polygon_object_latlng = (
                polygons.vertex_arrays_to_polygon_object(
                    exterior_x_coords=these_longitudes_deg,
                    exterior_y_coords=these_latitudes_deg)
            )

            start_times_unix_sec.append(this_start_time_unix_sec)
            end_times_unix_sec.append(this_end_time_unix_sec)
            list_of_polygon_objects_latlng.append(this_polygon_object_latlng)

    warning_dict = {
        START_TIME_COLUMN: start_times_unix_sec,
        END_TIME_COLUMN: end_times_unix_sec,
        POLYGON_COLUMN: list_of_polygon_objects_latlng
    }

    warning_table = pandas.DataFrame.from_dict(warning_dict)

    print('Writing warnings to file: "{0:s}"...'.format(
        output_pickle_file_name))

    file_system_utils.mkdir_recursive_if_necessary(
        file_name=output_pickle_file_name)

    # Context manager ensures the file is closed even if pickling fails
    # (previous open/dump/close sequence leaked the handle on error).
    with open(output_pickle_file_name, 'wb') as pickle_file_handle:
        pickle.dump(warning_table, pickle_file_handle)
                                             holes=(HOLE1_VERTEX_METRES_LIST,
                                                    HOLE2_VERTEX_METRES_LIST))

# The following constants are used to test project_xy_to_latlng and
# project_latlng_to_xy.
EXTERIOR_VERTEX_LATITUDES_DEG = numpy.array([49., 49., 60., 60., 53.8, 49.])
EXTERIOR_VERTEX_LONGITUDES_DEG = numpy.array(
    [246., 250., 250., 240., 240., 246.])
HOLE1_VERTEX_LATITUDES_DEG = numpy.array(
    [51.1, 52.2, 52.2, 53.3, 53.3, 51.1, 51.1])
HOLE1_VERTEX_LONGITUDES_DEG = numpy.array(
    [246., 246., 246.1, 246.1, 246.4, 246.4, 246.])

# Lat-long polygon with one hole, used for round-trip projection tests.
POLYGON_OBJECT_LATLNG = polygons.vertex_arrays_to_polygon_object(
    EXTERIOR_VERTEX_LONGITUDES_DEG,
    EXTERIOR_VERTEX_LATITUDES_DEG,
    hole_x_coords_list=[HOLE1_VERTEX_LONGITUDES_DEG],
    hole_y_coords_list=[HOLE1_VERTEX_LATITUDES_DEG])

# Projection centred inside the polygon above.
PROJECTION_OBJECT = projections.init_azimuthal_equidistant_projection(
    central_latitude_deg=55., central_longitude_deg=245.)

# The following constants are used to test simple_polygon_to_grid_points.
# Vertices sit on half-integer coordinates, i.e. on grid-cell edges.
VERTEX_ROWS_SIMPLE = numpy.array(
    [3.5, 3.5, 4.5, 4.5, -0.5, -0.5, 1.5, 1.5, 3.5])
VERTEX_COLUMNS_SIMPLE = numpy.array(
    [-0.5, 1.5, 1.5, 3.5, 3.5, 0.5, 0.5, -0.5, -0.5])

# Expected grid points (integer row-column pairs) inside the polygon above.
GRID_POINT_ROWS_IN_SIMPLE_POLY = numpy.array(
    [0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4])
GRID_POINT_COLUMNS_IN_SIMPLE_POLY = numpy.array(
    [1, 2, 3, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 2, 3])
"""Unit tests for shape_statistics.py."""

import unittest
import numpy
from gewittergefahr.gg_utils import shape_statistics as shape_stats
from gewittergefahr.gg_utils import polygons

FAKE_STATISTIC_NAME = 'foo'

VERTEX_X_METRES = numpy.array(
    [3., 3., 0., 0., 3., 3., 5., 5., 8., 8., 5., 5., 3.])
VERTEX_Y_METRES = numpy.array(
    [6., 3., 3., 1., 1., 0., 0., 1., 1., 3., 3., 6., 6.])
POLYGON_OBJECT_XY = polygons.vertex_arrays_to_polygon_object(
    VERTEX_X_METRES, VERTEX_Y_METRES)

GRID_SPACING_FOR_BINARY_MATRIX_METRES = 0.5
BINARY_IMAGE_MATRIX = numpy.array([
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
    [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
    [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
    [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
    [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0]], dtype=bool)
def find_points_in_conus(conus_latitudes_deg,
                         conus_longitudes_deg,
                         query_latitudes_deg,
                         query_longitudes_deg,
                         use_shortcuts=True,
                         verbose=False):
    """Finds points in CONUS.

    Q = number of query points

    This method assumes that the domain doesn't wrap around 0 deg E (Greenwich).

    If you set `use_shortcuts = True`, this method will assume that input
    coordinates `conus_latitudes_deg` and `conus_longitudes_deg` have been
    eroded by less than 100 km.

    :param conus_latitudes_deg: See doc for `_check_boundary`.
    :param conus_longitudes_deg: Same.
    :param query_latitudes_deg: length-Q numpy array with latitudes (deg N) of
        query points.
    :param query_longitudes_deg: length-Q numpy array with longitudes (deg E)
        of query points.
    :param use_shortcuts: Boolean flag.  If True, will use shortcuts to speed up
        calculation.
    :param verbose: Boolean flag.  If True, will print progress messages to
        command window.
    :return: in_conus_flags: length-Q numpy array of Boolean flags.
    """

    conus_longitudes_deg = _check_boundary(latitudes_deg=conus_latitudes_deg,
                                           longitudes_deg=conus_longitudes_deg)
    query_longitudes_deg = _check_boundary(latitudes_deg=query_latitudes_deg,
                                           longitudes_deg=query_longitudes_deg)
    error_checking.assert_is_boolean(use_shortcuts)
    error_checking.assert_is_boolean(verbose)

    num_query_points = len(query_latitudes_deg)

    # -1 = undetermined; 1 = in CONUS; 0 = outside CONUS.
    in_conus_flags = numpy.full(num_query_points, -1, dtype=int)

    if use_shortcuts:

        # Shortcut 1: points in this rectangle are definitely in CONUS.
        latitude_flags = numpy.logical_and(
            query_latitudes_deg >= SHORTCUT_BOX_LATITUDES_DEG[0],
            query_latitudes_deg <= SHORTCUT_BOX_LATITUDES_DEG[1])
        longitude_flags = numpy.logical_and(
            query_longitudes_deg >= SHORTCUT_BOX_LONGITUDES_DEG[0],
            query_longitudes_deg <= SHORTCUT_BOX_LONGITUDES_DEG[1])
        in_conus_flags[numpy.logical_and(latitude_flags, longitude_flags)] = 1

        # Simplified boundary files live in the package's parent directory.
        # (Computed once here; the original duplicated this for each file.)
        module_dir_name = os.path.dirname(__file__)
        parent_dir_name = '/'.join(module_dir_name.split('/')[:-1])

        # Shortcut 2: points inside the simplified 100-km-eroded boundary are
        # definitely in CONUS.
        inner_boundary_file_name = (
            '{0:s}/conus_polygon_100-km-eroded.nc'.format(parent_dir_name))

        inner_conus_latitudes_deg, inner_conus_longitudes_deg = (
            read_from_netcdf(inner_boundary_file_name))
        trial_indices = numpy.where(in_conus_flags == -1)[0]

        these_flags = find_points_in_conus(
            conus_latitudes_deg=inner_conus_latitudes_deg,
            conus_longitudes_deg=inner_conus_longitudes_deg,
            query_latitudes_deg=query_latitudes_deg[trial_indices],
            query_longitudes_deg=query_longitudes_deg[trial_indices],
            use_shortcuts=False)
        these_indices = trial_indices[numpy.where(these_flags)]
        in_conus_flags[these_indices] = 1

        # Shortcut 3: points outside the simplified 100-km-dilated boundary
        # are definitely NOT in CONUS.
        outer_boundary_file_name = (
            '{0:s}/conus_polygon_100-km-dilated.nc'.format(parent_dir_name))

        outer_conus_latitudes_deg, outer_conus_longitudes_deg = (
            read_from_netcdf(outer_boundary_file_name))
        trial_indices = numpy.where(in_conus_flags == -1)[0]

        these_flags = find_points_in_conus(
            conus_latitudes_deg=outer_conus_latitudes_deg,
            conus_longitudes_deg=outer_conus_longitudes_deg,
            query_latitudes_deg=query_latitudes_deg[trial_indices],
            query_longitudes_deg=query_longitudes_deg[trial_indices],
            use_shortcuts=False)
        these_indices = trial_indices[numpy.where(numpy.invert(these_flags))]
        in_conus_flags[these_indices] = 0

    # Exact point-in-polygon test for the points the shortcuts left unresolved
    # (or for all points, if shortcuts are disabled).
    conus_polygon_object = polygons.vertex_arrays_to_polygon_object(
        exterior_x_coords=conus_longitudes_deg,
        exterior_y_coords=conus_latitudes_deg)

    for i in range(num_query_points):
        if numpy.mod(i, 1000) == 0 and verbose:
            print(('Have done point-in-CONUS test for {0:d} of {1:d} points...'
                   ).format(i, num_query_points))

        if in_conus_flags[i] != -1:
            continue

        in_conus_flags[i] = polygons.point_in_or_on_polygon(
            polygon_object=conus_polygon_object,
            query_x_coordinate=query_longitudes_deg[i],
            query_y_coordinate=query_latitudes_deg[i])

    if verbose:
        print('Have done point-in-CONUS test for all {0:d} points!'.format(
            num_query_points))

    return in_conus_flags.astype(bool)