# Exemplo n.º 1
# 0
def get_start_end_times_for_spc_date(spc_date_string, top_raw_directory_name,
                                     tracking_scale_metres2):
    """Returns first and last tracking times for SPC date.

    :param spc_date_string: SPC date (format "yyyymmdd").
    :param top_raw_directory_name: Name of top-level directory with raw
        segmotion files.
    :param tracking_scale_metres2: Tracking scale.
    :return: start_time_unix_sec: First tracking time for SPC date.
    :return: end_time_unix_sec: Last tracking time for SPC date.
    """

    these_file_names = find_polygon_files_for_spc_date(
        spc_date_string=spc_date_string,
        top_raw_directory_name=top_raw_directory_name,
        tracking_scale_metres2=tracking_scale_metres2)

    def _valid_time_from_file(this_file_name):
        # Read metadata from one segmotion file and pull out its valid time.
        this_metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
            this_file_name, data_source=radar_utils.MYRORSS_SOURCE_ID)
        return this_metadata_dict[radar_utils.UNIX_TIME_COLUMN]

    # File names are ordered by time, so the first and last files bound the
    # SPC date.
    return (_valid_time_from_file(these_file_names[0]),
            _valid_time_from_file(these_file_names[-1]))
# Exemplo n.º 2
# 0
def _read_file(radar_file_name):
    """Reads radar data from file.

    M = number of rows in grid
    N = number of columns in grid

    :param radar_file_name: Path to input file.
    :return: reflectivity_matrix_dbz: M-by-N numpy array of reflectivity values.
    :return: latitudes_deg: length-M numpy array of latitudes (deg N).
    :return: longitudes_deg: length-N numpy array of longitudes (deg E).
    """

    this_metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
        netcdf_file_name=radar_file_name,
        data_source=radar_utils.MYRORSS_SOURCE_ID)

    this_sparse_grid_table = (
        myrorss_and_mrms_io.read_data_from_sparse_grid_file(
            netcdf_file_name=radar_file_name,
            field_name_orig=this_metadata_dict[
                myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
            data_source=radar_utils.MYRORSS_SOURCE_ID,
            sentinel_values=this_metadata_dict[
                radar_utils.SENTINEL_VALUE_COLUMN]))

    (refl_matrix_dbz, grid_latitudes_deg, grid_longitudes_deg
    ) = radar_s2f.sparse_to_full_grid(
        sparse_grid_table=this_sparse_grid_table,
        metadata_dict=this_metadata_dict)

    # Flip rows (and reverse latitudes) so latitude increases with row index.
    return (numpy.flipud(refl_matrix_dbz), grid_latitudes_deg[::-1],
            grid_longitudes_deg)
def get_echo_tops(
        unix_time_sec,
        spc_date_string,
        top_directory_name,
        critical_reflectivity_dbz,
        top_height_to_consider_m_asl=DEFAULT_TOP_INPUT_HEIGHT_FOR_ECHO_TOPS_M_ASL,
        lowest_refl_to_consider_dbz=None):
    """Finds echo top at each horizontal location.

    "Echo top" is max height with reflectivity >= critical reflectivity.

    M = number of rows (unique grid-point latitudes)
    N = number of columns (unique grid-point longitudes)

    :param unix_time_sec: Valid time.
    :param spc_date_string: SPC date (format "yyyymmdd").
    :param top_directory_name: Name of top-level directory with MYRORSS files.
    :param critical_reflectivity_dbz: Critical reflectivity (used to define echo
        top).
    :param top_height_to_consider_m_asl: Top height level to consider (metres
        above sea level).
    :param lowest_refl_to_consider_dbz: Lowest reflectivity to consider in echo
        top calculations.  If None, will consider all reflectivities.
    :return: echo_top_matrix_m_asl: M-by-N matrix of echo tops (metres above sea
        level).  Latitude increases down each column, and longitude increases to
        the right along each row.
    :return: grid_point_latitudes_deg: length-M numpy array with latitudes
        (deg N) of grid points, sorted in ascending order.
    :return: grid_point_longitudes_deg: length-N numpy array with longitudes
        (deg E) of grid points, sorted in ascending order.
    :return: metadata_dict: Dictionary created by
        `myrorss_and_mrms_io.read_metadata_from_raw_file` for column-max
        reflectivity.
    """

    error_checking.assert_is_greater(critical_reflectivity_dbz, 0.)
    error_checking.assert_is_greater(top_height_to_consider_m_asl, 0)
    top_height_to_consider_m_asl = int(
        numpy.round(top_height_to_consider_m_asl))

    if lowest_refl_to_consider_dbz is None:
        lowest_refl_to_consider_dbz = 0.
    error_checking.assert_is_less_than(lowest_refl_to_consider_dbz,
                                       critical_reflectivity_dbz)

    grid_point_heights_m_asl = radar_utils.get_valid_heights(
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_utils.REFL_NAME)
    grid_point_heights_m_asl = grid_point_heights_m_asl[
        grid_point_heights_m_asl <= top_height_to_consider_m_asl]

    column_max_refl_file_name = myrorss_and_mrms_io.find_raw_file(
        unix_time_sec=unix_time_sec,
        spc_date_string=spc_date_string,
        field_name=radar_utils.REFL_COLUMN_MAX_NAME,
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        top_directory_name=top_directory_name)

    num_grid_heights = len(grid_point_heights_m_asl)
    single_height_refl_file_names = [''] * num_grid_heights
    for k in range(num_grid_heights):
        single_height_refl_file_names[k] = myrorss_and_mrms_io.find_raw_file(
            unix_time_sec=unix_time_sec,
            spc_date_string=spc_date_string,
            field_name=radar_utils.REFL_NAME,
            data_source=radar_utils.MYRORSS_SOURCE_ID,
            top_directory_name=top_directory_name,
            height_m_asl=grid_point_heights_m_asl[k])

    print 'Reading "{0:s}" for echo-top calculation...'.format(
        column_max_refl_file_name)

    metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
        column_max_refl_file_name, data_source=radar_utils.MYRORSS_SOURCE_ID)
    this_sparse_grid_table = (
        myrorss_and_mrms_io.read_data_from_sparse_grid_file(
            column_max_refl_file_name,
            field_name_orig=metadata_dict[
                myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
            data_source=radar_utils.MYRORSS_SOURCE_ID,
            sentinel_values=metadata_dict[radar_utils.SENTINEL_VALUE_COLUMN]))

    (column_max_refl_matrix_dbz, grid_point_latitudes_deg,
     grid_point_longitudes_deg) = radar_s2f.sparse_to_full_grid(
         this_sparse_grid_table, metadata_dict)

    num_grid_rows = len(grid_point_latitudes_deg)
    num_grid_columns = len(grid_point_longitudes_deg)
    linear_indices_to_consider = numpy.where(
        numpy.reshape(column_max_refl_matrix_dbz, num_grid_rows *
                      num_grid_columns) >= critical_reflectivity_dbz)[0]

    print(
        'Echo-top calculation is needed at only {0:d}/{1:d} horizontal grid '
        'points!').format(len(linear_indices_to_consider),
                          num_grid_rows * num_grid_columns)

    echo_top_matrix_m_asl = numpy.full((num_grid_rows, num_grid_columns),
                                       numpy.nan)
    num_horiz_points_to_consider = len(linear_indices_to_consider)
    if num_horiz_points_to_consider == 0:
        return echo_top_matrix_m_asl

    grid_rows_to_consider, grid_columns_to_consider = numpy.unravel_index(
        linear_indices_to_consider, (num_grid_rows, num_grid_columns))
    reflectivity_matrix_dbz = numpy.full(
        (num_grid_heights, num_horiz_points_to_consider), numpy.nan)

    for k in range(num_grid_heights):
        print 'Reading "{0:s}" for echo-top calculation...'.format(
            single_height_refl_file_names[k])

        this_metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
            single_height_refl_file_names[k],
            data_source=radar_utils.MYRORSS_SOURCE_ID)
        this_sparse_grid_table = (
            myrorss_and_mrms_io.read_data_from_sparse_grid_file(
                single_height_refl_file_names[k],
                field_name_orig=this_metadata_dict[
                    myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
                data_source=radar_utils.MYRORSS_SOURCE_ID,
                sentinel_values=this_metadata_dict[
                    radar_utils.SENTINEL_VALUE_COLUMN]))

        this_reflectivity_matrix_dbz, _, _ = radar_s2f.sparse_to_full_grid(
            this_sparse_grid_table,
            this_metadata_dict,
            ignore_if_below=lowest_refl_to_consider_dbz)
        reflectivity_matrix_dbz[k, :] = this_reflectivity_matrix_dbz[
            grid_rows_to_consider, grid_columns_to_consider]

    print 'Computing echo tops at the {0:d} horizontal grid points...'.format(
        num_horiz_points_to_consider)

    for i in range(num_horiz_points_to_consider):
        echo_top_matrix_m_asl[
            grid_rows_to_consider[i], grid_columns_to_consider[i]] = (
                radar_utils.get_echo_top_single_column(
                    reflectivities_dbz=reflectivity_matrix_dbz[:, i],
                    heights_m_asl=grid_point_heights_m_asl,
                    critical_reflectivity_dbz=critical_reflectivity_dbz))

    return (numpy.flipud(echo_top_matrix_m_asl),
            grid_point_latitudes_deg[::-1], grid_point_longitudes_deg,
            metadata_dict)
# Exemplo n.º 4
# 0
def get_storm_based_radar_stats_myrorss_or_mrms(
        storm_object_table,
        top_radar_dir_name,
        radar_metadata_dict_for_tracking,
        statistic_names=DEFAULT_STATISTIC_NAMES,
        percentile_levels=DEFAULT_PERCENTILE_LEVELS,
        radar_field_names=DEFAULT_FIELDS_FOR_MYRORSS_AND_MRMS,
        reflectivity_heights_m_asl=None,
        radar_source=radar_utils.MYRORSS_SOURCE_ID,
        dilate_azimuthal_shear=False,
        dilation_half_width_in_pixels=dilation.DEFAULT_HALF_WIDTH,
        dilation_percentile_level=DEFAULT_DILATION_PERCENTILE_LEVEL):
    """Computes radar statistics for each storm object.

    In this case, radar data must be from MYRORSS or MRMS.

    N = number of storm objects
    P = number of field/height pairs
    S = number of statistics (percentile- and non-percentile-based)

    :param storm_object_table: See documentation for
        `get_storm_based_radar_stats_gridrad`.
    :param top_radar_dir_name: See doc for
        `get_storm_based_radar_stats_gridrad`.
    :param radar_metadata_dict_for_tracking: Dictionary created by
        `myrorss_and_mrms_io.read_metadata_from_raw_file`, describing radar grid
        used to create storm objects.
    :param statistic_names: 1-D list of non-percentile-based statistics.
    :param percentile_levels: 1-D numpy array of percentile levels.
    :param radar_field_names: 1-D list of radar fields for which stats will be
        computed.
    :param reflectivity_heights_m_asl: 1-D numpy array of heights (metres above
        sea level) for the field "reflectivity_dbz".  If "reflectivity_dbz" is
        not in `radar_field_names`, you can leave this as None.
    :param radar_source: Source of radar data (either "myrorss" or "mrms").
    :param dilate_azimuthal_shear: Boolean flag.  If False, azimuthal-shear
        stats will be based only on values inside the storm object.  If True,
        azimuthal-shear fields will be dilated, so azimuthal-shear stats will be
        based on values inside and near the storm object.  This is useful
        because sometimes large az-shear values occur just outside the storm
        object.
    :param dilation_half_width_in_pixels: See documentation for
        `dilation.dilate_2d_matrix`.
    :param dilation_percentile_level: See documentation for
        `dilation.dilate_2d_matrix`.
    :return: storm_object_statistic_table: pandas DataFrame with 2 + S * P
        columns.  The last S * P columns are one for each statistic-field-height
        tuple.  Names of these columns are determined by
        `radar_field_and_statistic_to_column_name` and
        `radar_field_and_percentile_to_column_name`.  The first 2 columns are
        listed below.
    storm_object_statistic_table.full_id_string: Storm ID (taken from input
        table).
    storm_object_statistic_table.unix_time_sec: Valid time (taken from input
        table).
    """

    error_checking.assert_is_boolean(dilate_azimuthal_shear)
    percentile_levels = _check_statistic_params(statistic_names,
                                                percentile_levels)

    # Find radar files.
    spc_date_strings = (
        storm_object_table[tracking_utils.SPC_DATE_COLUMN].values.tolist())

    file_dictionary = myrorss_and_mrms_io.find_many_raw_files(
        desired_times_unix_sec=storm_object_table[
            tracking_utils.VALID_TIME_COLUMN].values.astype(int),
        spc_date_strings=spc_date_strings,
        data_source=radar_source,
        field_names=radar_field_names,
        top_directory_name=top_radar_dir_name,
        reflectivity_heights_m_asl=reflectivity_heights_m_asl)

    # radar_file_name_matrix[i, j] = file for [j]th field/height pair at [i]th
    # unique valid time (None if the file does not exist).
    radar_file_name_matrix = file_dictionary[
        myrorss_and_mrms_io.RADAR_FILE_NAMES_KEY]
    radar_field_name_by_pair = file_dictionary[
        myrorss_and_mrms_io.FIELD_NAME_BY_PAIR_KEY]
    radar_height_by_pair_m_asl = file_dictionary[
        myrorss_and_mrms_io.HEIGHT_BY_PAIR_KEY]

    valid_times_unix_sec = file_dictionary[
        myrorss_and_mrms_io.UNIQUE_TIMES_KEY]
    valid_spc_date_strings = [
        time_conversion.time_to_spc_date_string(t) for t in file_dictionary[
            myrorss_and_mrms_io.SPC_DATES_AT_UNIQUE_TIMES_KEY]
    ]

    # Initialize output.  NaN marks a statistic that is never computed (e.g.,
    # because the radar file for that field/height/time is missing).
    num_field_height_pairs = len(radar_field_name_by_pair)
    num_valid_times = len(valid_times_unix_sec)
    num_statistics = len(statistic_names)
    num_percentiles = len(percentile_levels)
    num_storm_objects = len(storm_object_table.index)

    statistic_matrix = numpy.full(
        (num_storm_objects, num_field_height_pairs, num_statistics), numpy.nan)
    percentile_matrix = numpy.full(
        (num_storm_objects, num_field_height_pairs, num_percentiles),
        numpy.nan)

    valid_time_strings = [
        time_conversion.unix_sec_to_string(t, DEFAULT_TIME_FORMAT)
        for t in valid_times_unix_sec
    ]

    # Loop over field/height pairs and valid times, reading one radar grid at
    # a time.
    for j in range(num_field_height_pairs):
        for i in range(num_valid_times):
            # No radar file for this field/height pair at this time.
            if radar_file_name_matrix[i, j] is None:
                continue

            print(
                ('Computing stats for "{0:s}" at {1:d} metres ASL and {2:s}...'
                 ).format(radar_field_name_by_pair[j],
                          int(numpy.round(radar_height_by_pair_m_asl[j])),
                          valid_time_strings[i]))

            this_metadata_dict = (
                myrorss_and_mrms_io.read_metadata_from_raw_file(
                    radar_file_name_matrix[i, j], data_source=radar_source))

            # If the tracking grid differs from the current radar grid, remap
            # each storm object's grid points onto the current grid.
            if radar_metadata_dict_for_tracking is None:
                this_storm_to_grid_points_table = storm_object_table[
                    STORM_OBJECT_TO_GRID_PTS_COLUMNS]
            else:
                this_storm_to_grid_points_table = (
                    get_grid_points_in_storm_objects(
                        storm_object_table=storm_object_table,
                        orig_grid_metadata_dict=
                        radar_metadata_dict_for_tracking,
                        new_grid_metadata_dict=this_metadata_dict))

            # Read data for [j]th field/height pair at [i]th time step.
            sparse_grid_table_this_field_height = (
                myrorss_and_mrms_io.read_data_from_sparse_grid_file(
                    radar_file_name_matrix[i, j],
                    field_name_orig=this_metadata_dict[
                        myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
                    data_source=radar_source,
                    sentinel_values=this_metadata_dict[
                        radar_utils.SENTINEL_VALUE_COLUMN]))

            radar_matrix_this_field_height = radar_s2f.sparse_to_full_grid(
                sparse_grid_table_this_field_height, this_metadata_dict)[0]

            if (dilate_azimuthal_shear and radar_field_name_by_pair[j]
                    in AZIMUTHAL_SHEAR_FIELD_NAMES):
                print('Dilating azimuthal-shear field...')

                radar_matrix_this_field_height = dilation.dilate_2d_matrix(
                    radar_matrix_this_field_height,
                    percentile_level=dilation_percentile_level,
                    half_width_in_pixels=dilation_half_width_in_pixels,
                    take_largest_absolute_value=True)

            # Treat missing values (NaN) as zero before computing stats.
            radar_matrix_this_field_height[numpy.isnan(
                radar_matrix_this_field_height)] = 0.

            # Find storm objects at [i]th valid time.
            these_storm_flags = numpy.logical_and(
                storm_object_table[tracking_utils.VALID_TIME_COLUMN].values ==
                valid_times_unix_sec[i],
                storm_object_table[tracking_utils.SPC_DATE_COLUMN].values ==
                valid_spc_date_strings[i])

            these_storm_indices = numpy.where(these_storm_flags)[0]

            # Extract storm-based radar stats for [j]th field/height pair at
            # [i]th time step.
            for this_storm_index in these_storm_indices:
                radar_values_this_storm = extract_radar_grid_points(
                    radar_matrix_this_field_height,
                    row_indices=this_storm_to_grid_points_table[
                        tracking_utils.ROWS_IN_STORM_COLUMN].
                    values[this_storm_index].astype(int),
                    column_indices=this_storm_to_grid_points_table[
                        tracking_utils.COLUMNS_IN_STORM_COLUMN].
                    values[this_storm_index].astype(int))

                (statistic_matrix[this_storm_index, j, :],
                 percentile_matrix[this_storm_index,
                                   j, :]) = get_spatial_statistics(
                                       radar_values_this_storm,
                                       statistic_names=statistic_names,
                                       percentile_levels=percentile_levels)

    # Create pandas DataFrame.  One column per statistic-field-height tuple.
    storm_object_statistic_dict = {}

    for j in range(num_field_height_pairs):
        for k in range(num_statistics):
            this_column_name = radar_field_and_statistic_to_column_name(
                radar_field_name=radar_field_name_by_pair[j],
                radar_height_m_asl=radar_height_by_pair_m_asl[j],
                statistic_name=statistic_names[k])

            storm_object_statistic_dict.update(
                {this_column_name: statistic_matrix[:, j, k]})

        for k in range(num_percentiles):
            this_column_name = radar_field_and_percentile_to_column_name(
                radar_field_name=radar_field_name_by_pair[j],
                radar_height_m_asl=radar_height_by_pair_m_asl[j],
                percentile_level=percentile_levels[k])

            storm_object_statistic_dict.update(
                {this_column_name: percentile_matrix[:, j, k]})

    storm_object_statistic_table = pandas.DataFrame.from_dict(
        storm_object_statistic_dict)

    # Prepend the ID/time columns from the input table.
    return pandas.concat([
        storm_object_table[STORM_COLUMNS_TO_KEEP], storm_object_statistic_table
    ],
                         axis=1)
# Exemplo n.º 5
# 0
def _run(top_tracking_dir_name, first_spc_date_string, last_spc_date_string,
         storm_colour, storm_opacity, include_secondary_ids,
         min_plot_latitude_deg, max_plot_latitude_deg, min_plot_longitude_deg,
         max_plot_longitude_deg, top_myrorss_dir_name, radar_field_name,
         radar_height_m_asl, output_dir_name):
    """Plots storm outlines (along with IDs) at each time step.

    This is effectively the main method.

    :param top_tracking_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param storm_colour: Same.
    :param storm_opacity: Same.
    :param include_secondary_ids: Same.
    :param min_plot_latitude_deg: Same.
    :param max_plot_latitude_deg: Same.
    :param min_plot_longitude_deg: Same.
    :param max_plot_longitude_deg: Same.
    :param top_myrorss_dir_name: Same.
    :param radar_field_name: Same.
    :param radar_height_m_asl: Same.
    :param output_dir_name: Same.
    """

    # Empty string or "None" means no MYRORSS directory, i.e. no radar
    # underlay will be plotted.
    if top_myrorss_dir_name in ['', 'None']:
        top_myrorss_dir_name = None

    # Height applies only to the full reflectivity field.
    if radar_field_name != radar_utils.REFL_NAME:
        radar_height_m_asl = None

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    # Collect tracking files over all SPC dates in the period.
    tracking_file_names = []

    for this_spc_date_string in spc_date_strings:
        tracking_file_names += (tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=DUMMY_SOURCE_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0])

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    # Fill in any unspecified plotting limits from the storm objects
    # themselves.
    latitude_limits_deg, longitude_limits_deg = _get_plotting_limits(
        min_plot_latitude_deg=min_plot_latitude_deg,
        max_plot_latitude_deg=max_plot_latitude_deg,
        min_plot_longitude_deg=min_plot_longitude_deg,
        max_plot_longitude_deg=max_plot_longitude_deg,
        storm_object_table=storm_object_table)

    min_plot_latitude_deg = latitude_limits_deg[0]
    max_plot_latitude_deg = latitude_limits_deg[1]
    min_plot_longitude_deg = longitude_limits_deg[0]
    max_plot_longitude_deg = longitude_limits_deg[1]

    valid_times_unix_sec = numpy.unique(
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)
    num_times = len(valid_times_unix_sec)

    # Make one figure per valid time.
    for i in range(num_times):
        these_current_rows = numpy.where(
            storm_object_table[tracking_utils.VALID_TIME_COLUMN].values ==
            valid_times_unix_sec[i])[0]

        # Keep only storm objects inside the plotting domain.
        these_current_subrows = _filter_storm_objects_latlng(
            storm_object_table=storm_object_table.iloc[these_current_rows],
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg)

        if len(these_current_subrows) == 0:
            continue

        these_current_rows = these_current_rows[these_current_subrows]

        this_storm_object_table = _find_relevant_storm_objects(
            storm_object_table=storm_object_table,
            current_rows=these_current_rows)

        these_latlng_rows = _filter_storm_objects_latlng(
            storm_object_table=this_storm_object_table,
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg)

        # Optionally read a radar field to plot underneath the storm outlines.
        if top_myrorss_dir_name is None:
            this_radar_matrix = None
            these_radar_latitudes_deg = None
            these_radar_longitudes_deg = None
        else:
            this_myrorss_file_name = myrorss_and_mrms_io.find_raw_file(
                top_directory_name=top_myrorss_dir_name,
                unix_time_sec=valid_times_unix_sec[i],
                spc_date_string=time_conversion.time_to_spc_date_string(
                    valid_times_unix_sec[i]),
                field_name=radar_field_name,
                data_source=radar_utils.MYRORSS_SOURCE_ID,
                height_m_asl=radar_height_m_asl,
                raise_error_if_missing=True)

            print(
                'Reading data from: "{0:s}"...'.format(this_myrorss_file_name))

            this_metadata_dict = (
                myrorss_and_mrms_io.read_metadata_from_raw_file(
                    netcdf_file_name=this_myrorss_file_name,
                    data_source=radar_utils.MYRORSS_SOURCE_ID))

            this_sparse_grid_table = (
                myrorss_and_mrms_io.read_data_from_sparse_grid_file(
                    netcdf_file_name=this_myrorss_file_name,
                    field_name_orig=this_metadata_dict[
                        myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
                    data_source=radar_utils.MYRORSS_SOURCE_ID,
                    sentinel_values=this_metadata_dict[
                        radar_utils.SENTINEL_VALUE_COLUMN]))

            (this_radar_matrix, these_radar_latitudes_deg,
             these_radar_longitudes_deg) = radar_s2f.sparse_to_full_grid(
                 sparse_grid_table=this_sparse_grid_table,
                 metadata_dict=this_metadata_dict)

            # Flip rows (and reverse latitudes) so latitude increases with
            # row index.
            this_radar_matrix = numpy.flipud(this_radar_matrix)
            these_radar_latitudes_deg = these_radar_latitudes_deg[::-1]

        _, this_axes_object, this_basemap_object = (
            plotting_utils.create_equidist_cylindrical_map(
                min_latitude_deg=min_plot_latitude_deg,
                max_latitude_deg=max_plot_latitude_deg,
                min_longitude_deg=min_plot_longitude_deg,
                max_longitude_deg=max_plot_longitude_deg,
                resolution_string='i'))

        _plot_storm_outlines_one_time(
            storm_object_table=this_storm_object_table.iloc[these_latlng_rows],
            valid_time_unix_sec=valid_times_unix_sec[i],
            axes_object=this_axes_object,
            basemap_object=this_basemap_object,
            storm_colour=storm_colour,
            storm_opacity=storm_opacity,
            include_secondary_ids=include_secondary_ids,
            output_dir_name=output_dir_name,
            radar_matrix=this_radar_matrix,
            radar_field_name=radar_field_name,
            radar_latitudes_deg=these_radar_latitudes_deg,
            radar_longitudes_deg=these_radar_longitudes_deg)
def _run_for_myrorss(spc_date_string, top_radar_dir_name_tarred,
                     top_radar_dir_name_untarred, top_output_dir_name,
                     option_dict):
    """Runs echo classification for MYRORSS data.

    :param spc_date_string: See documentation at top of file.
    :param top_radar_dir_name_tarred: Same.
    :param top_radar_dir_name_untarred: Same.
    :param top_output_dir_name: Same.
    :param option_dict: See doc for
        `echo_classification.find_convective_pixels`.
    """

    # Empty string or "None" means the data are already untarred.
    if top_radar_dir_name_tarred in ['', 'None']:
        top_radar_dir_name_tarred = None

    if top_radar_dir_name_tarred is not None:
        tar_file_name = '{0:s}/{1:s}/{2:s}.tar'.format(
            top_radar_dir_name_tarred, spc_date_string[:4], spc_date_string)

        myrorss_io.unzip_1day_tar_file(
            tar_file_name=tar_file_name,
            field_names=[radar_utils.REFL_NAME],
            spc_date_string=spc_date_string,
            top_target_directory_name=top_radar_dir_name_untarred,
            refl_heights_m_asl=RADAR_HEIGHTS_M_ASL)
        print SEPARATOR_STRING

    # Use files at the lowest height to determine which valid times exist.
    these_file_names = myrorss_and_mrms_io.find_raw_files_one_spc_date(
        spc_date_string=spc_date_string,
        field_name=radar_utils.REFL_NAME,
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        top_directory_name=top_radar_dir_name_untarred,
        height_m_asl=RADAR_HEIGHTS_M_ASL[0],
        raise_error_if_missing=True)

    valid_times_unix_sec = numpy.array([
        myrorss_and_mrms_io.raw_file_name_to_time(f) for f in these_file_names
    ],
                                       dtype=int)

    # Keep only valid times within the SPC date.
    valid_times_unix_sec = numpy.sort(valid_times_unix_sec)
    start_time_unix_sec = time_conversion.get_start_of_spc_date(
        spc_date_string)
    end_time_unix_sec = time_conversion.get_end_of_spc_date(spc_date_string)

    good_indices = numpy.where(
        numpy.logical_and(valid_times_unix_sec >= start_time_unix_sec,
                          valid_times_unix_sec <= end_time_unix_sec))[0]

    valid_times_unix_sec = valid_times_unix_sec[good_indices]

    radar_file_dict = myrorss_and_mrms_io.find_many_raw_files(
        desired_times_unix_sec=valid_times_unix_sec,
        spc_date_strings=[spc_date_string] * len(valid_times_unix_sec),
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_names=[radar_utils.REFL_NAME],
        top_directory_name=top_radar_dir_name_untarred,
        reflectivity_heights_m_asl=RADAR_HEIGHTS_M_ASL)

    # radar_file_name_matrix[i, j] = reflectivity file at [i]th time and [j]th
    # height.
    radar_file_name_matrix = radar_file_dict[
        myrorss_and_mrms_io.RADAR_FILE_NAMES_KEY]
    valid_times_unix_sec = radar_file_dict[
        myrorss_and_mrms_io.UNIQUE_TIMES_KEY]

    num_times = len(valid_times_unix_sec)
    num_heights = len(RADAR_HEIGHTS_M_ASL)

    for i in range(num_times):
        # Reflectivity is accumulated height by height into a 3-D matrix.
        reflectivity_matrix_dbz = None
        fine_grid_point_latitudes_deg = None
        fine_grid_point_longitudes_deg = None

        found_corrupt_file = False

        for j in range(num_heights):
            print 'Reading data from: "{0:s}"...'.format(
                radar_file_name_matrix[i, j])

            this_metadata_dict = (
                myrorss_and_mrms_io.read_metadata_from_raw_file(
                    netcdf_file_name=radar_file_name_matrix[i, j],
                    data_source=radar_utils.MYRORSS_SOURCE_ID,
                    raise_error_if_fails=False))

            # None means the file could not be read; skip this valid time.
            if this_metadata_dict is None:
                found_corrupt_file = True
                break

            this_sparse_grid_table = (
                myrorss_and_mrms_io.read_data_from_sparse_grid_file(
                    netcdf_file_name=radar_file_name_matrix[i, j],
                    field_name_orig=this_metadata_dict[
                        myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
                    data_source=radar_utils.MYRORSS_SOURCE_ID,
                    sentinel_values=this_metadata_dict[
                        radar_utils.SENTINEL_VALUE_COLUMN]))

            (this_refl_matrix_dbz, fine_grid_point_latitudes_deg,
             fine_grid_point_longitudes_deg) = radar_s2f.sparse_to_full_grid(
                 sparse_grid_table=this_sparse_grid_table,
                 metadata_dict=this_metadata_dict)

            # Subsample every other row/column (coarse grid) and add a height
            # axis so levels can be concatenated.
            this_refl_matrix_dbz = numpy.expand_dims(
                this_refl_matrix_dbz[::2, ::2], axis=-1)

            if reflectivity_matrix_dbz is None:
                reflectivity_matrix_dbz = this_refl_matrix_dbz + 0.
            else:
                reflectivity_matrix_dbz = numpy.concatenate(
                    (reflectivity_matrix_dbz, this_refl_matrix_dbz), axis=-1)

        print '\n'
        if found_corrupt_file:
            continue

        # Flip rows (and reverse latitudes) so latitude increases with row
        # index.
        reflectivity_matrix_dbz = numpy.flip(reflectivity_matrix_dbz, axis=0)
        fine_grid_point_latitudes_deg = fine_grid_point_latitudes_deg[::-1]
        coarse_grid_point_latitudes_deg = fine_grid_point_latitudes_deg[::2]
        coarse_grid_point_longitudes_deg = fine_grid_point_longitudes_deg[::2]

        coarse_grid_metadata_dict = {
            echo_classifn.MIN_LATITUDE_KEY:
            numpy.min(coarse_grid_point_latitudes_deg),
            echo_classifn.LATITUDE_SPACING_KEY:
            (coarse_grid_point_latitudes_deg[1] -
             coarse_grid_point_latitudes_deg[0]),
            echo_classifn.MIN_LONGITUDE_KEY:
            numpy.min(coarse_grid_point_longitudes_deg),
            echo_classifn.LONGITUDE_SPACING_KEY:
            (coarse_grid_point_longitudes_deg[1] -
             coarse_grid_point_longitudes_deg[0]),
            echo_classifn.HEIGHTS_KEY:
            RADAR_HEIGHTS_M_ASL
        }

        fine_grid_metadata_dict = {
            echo_classifn.MIN_LATITUDE_KEY:
            numpy.min(fine_grid_point_latitudes_deg),
            echo_classifn.LATITUDE_SPACING_KEY:
            (fine_grid_point_latitudes_deg[1] -
             fine_grid_point_latitudes_deg[0]),
            echo_classifn.MIN_LONGITUDE_KEY:
            numpy.min(fine_grid_point_longitudes_deg),
            echo_classifn.LONGITUDE_SPACING_KEY:
            (fine_grid_point_longitudes_deg[1] -
             fine_grid_point_longitudes_deg[0]),
            echo_classifn.HEIGHTS_KEY:
            RADAR_HEIGHTS_M_ASL
        }

        # Classify on the coarse grid, then upsample flags back to the fine
        # grid before writing.
        convective_flag_matrix = echo_classifn.find_convective_pixels(
            reflectivity_matrix_dbz=reflectivity_matrix_dbz,
            grid_metadata_dict=coarse_grid_metadata_dict,
            valid_time_unix_sec=valid_times_unix_sec[i],
            option_dict=option_dict)

        print 'Number of convective pixels = {0:d}\n'.format(
            numpy.sum(convective_flag_matrix))

        convective_flag_matrix = echo_classifn._double_class_resolution(
            coarse_convective_flag_matrix=convective_flag_matrix,
            coarse_grid_point_latitudes_deg=coarse_grid_point_latitudes_deg,
            coarse_grid_point_longitudes_deg=coarse_grid_point_longitudes_deg,
            fine_grid_point_latitudes_deg=fine_grid_point_latitudes_deg,
            fine_grid_point_longitudes_deg=fine_grid_point_longitudes_deg)

        this_output_file_name = echo_classifn.find_classification_file(
            top_directory_name=top_output_dir_name,
            valid_time_unix_sec=valid_times_unix_sec[i],
            desire_zipped=False,
            allow_zipped_or_unzipped=False,
            raise_error_if_missing=False)

        print 'Writing echo classifications to: "{0:s}"...'.format(
            this_output_file_name)

        echo_classifn.write_classifications(
            convective_flag_matrix=convective_flag_matrix,
            grid_metadata_dict=fine_grid_metadata_dict,
            valid_time_unix_sec=valid_times_unix_sec[i],
            option_dict=option_dict,
            netcdf_file_name=this_output_file_name)

        unzipping.gzip_file(input_file_name=this_output_file_name,
                            delete_input_file=True)

        print SEPARATOR_STRING

    # Clean up untarred data only if we created it from a tar file above.
    if top_radar_dir_name_tarred is None:
        return

    myrorss_io.remove_unzipped_data_1day(
        spc_date_string=spc_date_string,
        top_directory_name=top_radar_dir_name_untarred,
        field_names=[radar_utils.REFL_NAME],
        refl_heights_m_asl=RADAR_HEIGHTS_M_ASL)
    print SEPARATOR_STRING
def _plot_tornado_and_radar(top_myrorss_dir_name, radar_field_name,
                            radar_height_m_asl, spc_date_string, tornado_table,
                            tornado_row, output_file_name):
    """Plots one unlinked tornado on top of a radar field.

    The tornado time is rounded to the nearest radar time step; if the rounded
    time falls outside the SPC date of the linkage file, it is clamped to the
    first/last radar time of that date.

    :param top_myrorss_dir_name: See documentation at top of file.
    :param radar_field_name: Same.
    :param radar_height_m_asl: Same.
    :param spc_date_string: SPC date for linkage file (format "yyyymmdd").
    :param tornado_table: pandas DataFrame created by
        `linkage._read_input_tornado_reports`.
    :param tornado_row: Will plot only tornado in [j]th row of table, where j =
        `tornado_row`.
    :param output_file_name: Path to output file.  Figure will be saved here.
    """

    tornado_time_unix_sec = tornado_table[
        linkage.EVENT_TIME_COLUMN].values[tornado_row]

    # Radar files exist only at discrete times, so round the tornado time to
    # the nearest radar time step.
    radar_time_unix_sec = number_rounding.round_to_nearest(
        tornado_time_unix_sec, RADAR_TIME_INTERVAL_SEC)

    radar_spc_date_string = time_conversion.time_to_spc_date_string(
        radar_time_unix_sec)

    # Raise an error for a missing file only when the rounded radar time falls
    # on the same SPC date as the linkage file; otherwise fall through to the
    # clamping logic below.
    radar_file_name = myrorss_and_mrms_io.find_raw_file(
        top_directory_name=top_myrorss_dir_name,
        spc_date_string=radar_spc_date_string,
        unix_time_sec=radar_time_unix_sec,
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_field_name,
        height_m_asl=radar_height_m_asl,
        raise_error_if_missing=spc_date_string == radar_spc_date_string)

    if not os.path.isfile(radar_file_name):
        # The rounded time lies outside the linkage SPC date.  Clamp it to the
        # [first, last] radar time of that date and retry within the date,
        # this time raising if the file is still missing.
        first_radar_time_unix_sec = number_rounding.ceiling_to_nearest(
            time_conversion.get_start_of_spc_date(spc_date_string),
            RADAR_TIME_INTERVAL_SEC)

        last_radar_time_unix_sec = number_rounding.floor_to_nearest(
            time_conversion.get_end_of_spc_date(spc_date_string),
            RADAR_TIME_INTERVAL_SEC)

        radar_time_unix_sec = max(
            [radar_time_unix_sec, first_radar_time_unix_sec])

        radar_time_unix_sec = min(
            [radar_time_unix_sec, last_radar_time_unix_sec])

        radar_file_name = myrorss_and_mrms_io.find_raw_file(
            top_directory_name=top_myrorss_dir_name,
            spc_date_string=spc_date_string,
            unix_time_sec=radar_time_unix_sec,
            data_source=radar_utils.MYRORSS_SOURCE_ID,
            field_name=radar_field_name,
            height_m_asl=radar_height_m_asl,
            raise_error_if_missing=True)

    # Read the sparse radar grid and expand it to a full lat-long grid.
    radar_metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
        netcdf_file_name=radar_file_name,
        data_source=radar_utils.MYRORSS_SOURCE_ID)

    sparse_grid_table = (myrorss_and_mrms_io.read_data_from_sparse_grid_file(
        netcdf_file_name=radar_file_name,
        field_name_orig=radar_metadata_dict[
            myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        sentinel_values=radar_metadata_dict[radar_utils.SENTINEL_VALUE_COLUMN])
                         )

    radar_matrix, grid_point_latitudes_deg, grid_point_longitudes_deg = (
        radar_s2f.sparse_to_full_grid(sparse_grid_table=sparse_grid_table,
                                      metadata_dict=radar_metadata_dict))

    # Reverse row order and the latitude array together, so that latitude
    # increases with row index while row/latitude correspondence is preserved.
    radar_matrix = numpy.flip(radar_matrix, axis=0)
    grid_point_latitudes_deg = grid_point_latitudes_deg[::-1]

    # Create map spanning the full radar grid.  [1:] drops the first element
    # returned by `create_equidist_cylindrical_map`.
    axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=numpy.min(grid_point_latitudes_deg),
            max_latitude_deg=numpy.max(grid_point_latitudes_deg),
            min_longitude_deg=numpy.min(grid_point_longitudes_deg),
            max_longitude_deg=numpy.max(grid_point_longitudes_deg),
            resolution_string='i')[1:])

    # Draw map background (coastlines, borders, grid lines) before the data.
    plotting_utils.plot_coastlines(basemap_object=basemap_object,
                                   axes_object=axes_object,
                                   line_colour=BORDER_COLOUR)

    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR)

    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR)

    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS)

    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS)

    # Grid spacing is taken from the first pair of grid points (assumes a
    # regular lat-long grid).
    radar_plotting.plot_latlng_grid(
        field_matrix=radar_matrix,
        field_name=radar_field_name,
        axes_object=axes_object,
        min_grid_point_latitude_deg=numpy.min(grid_point_latitudes_deg),
        min_grid_point_longitude_deg=numpy.min(grid_point_longitudes_deg),
        latitude_spacing_deg=numpy.diff(grid_point_latitudes_deg[:2])[0],
        longitude_spacing_deg=numpy.diff(grid_point_longitudes_deg[:2])[0])

    # Mark the tornado location on top of the radar field.
    tornado_latitude_deg = tornado_table[
        linkage.EVENT_LATITUDE_COLUMN].values[tornado_row]

    tornado_longitude_deg = tornado_table[
        linkage.EVENT_LONGITUDE_COLUMN].values[tornado_row]

    axes_object.plot(tornado_longitude_deg,
                     tornado_latitude_deg,
                     linestyle='None',
                     marker=TORNADO_MARKER_TYPE,
                     markersize=TORNADO_MARKER_SIZE,
                     markeredgewidth=TORNADO_MARKER_EDGE_WIDTH,
                     markerfacecolor=plotting_utils.colour_from_numpy_to_tuple(
                         TORNADO_MARKER_COLOUR),
                     markeredgecolor=plotting_utils.colour_from_numpy_to_tuple(
                         TORNADO_MARKER_COLOUR))

    # Title shows the tornado's (unrounded) time and location.
    tornado_time_string = time_conversion.unix_sec_to_string(
        tornado_time_unix_sec, TIME_FORMAT)

    title_string = (
        'Unlinked tornado at {0:s}, {1:.2f} deg N, {2:.2f} deg E').format(
            tornado_time_string, tornado_latitude_deg, tornado_longitude_deg)

    pyplot.title(title_string, fontsize=TITLE_FONT_SIZE)

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    pyplot.savefig(output_file_name, dpi=FIGURE_RESOLUTION_DPI)
    pyplot.close()

    imagemagick_utils.trim_whitespace(input_file_name=output_file_name,
                                      output_file_name=output_file_name)
Exemplo n.º 8
0
        or `tracking_utils.make_buffers_around_storm_objects`.
    :return: storm_table: pandas DataFrame with columns from both stats_table
        and polygon_table.
    """

    return polygon_table.merge(stats_table,
                               on=tracking_utils.STORM_ID_COLUMN,
                               how='inner')


if __name__ == '__main__':
    # Read storm statistics from the segmotion XML file.
    STATS_TABLE = read_stats_from_xml(XML_FILE_NAME,
                                      spc_date_string=SPC_DATE_STRING)
    # NOTE: Python-2 `print X` statements replaced with `print(...)` calls for
    # Python-3 compatibility and consistency with the rest of this file.
    print(STATS_TABLE)

    # Read storm polygons from the NetCDF file, using its metadata to decode
    # the raw grid.
    METADATA_DICT = myrorss_and_mrms_io.read_metadata_from_raw_file(
        NETCDF_FILE_NAME, data_source=radar_utils.MYRORSS_SOURCE_ID)
    POLYGON_TABLE = read_polygons_from_netcdf(
        NETCDF_FILE_NAME,
        metadata_dict=METADATA_DICT,
        spc_date_string=SPC_DATE_STRING,
        tracking_start_time_unix_sec=TRACKING_START_TIME_UNIX_SEC,
        tracking_end_time_unix_sec=TRACKING_END_TIME_UNIX_SEC)
    print(POLYGON_TABLE)

    # Add distance buffers around each storm polygon.
    POLYGON_TABLE = tracking_utils.make_buffers_around_storm_objects(
        POLYGON_TABLE,
        min_distances_metres=MIN_BUFFER_DISTS_METRES,
        max_distances_metres=MAX_BUFFER_DISTS_METRES)
    print(POLYGON_TABLE)

    # Join statistics and buffered polygons into one storm table.
    STORM_TABLE = join_stats_and_polygons(STATS_TABLE, POLYGON_TABLE)
def _plot_one_example_one_time(storm_object_table, full_id_string,
                               valid_time_unix_sec, tornado_table,
                               top_myrorss_dir_name, radar_field_name,
                               radar_height_m_asl, latitude_limits_deg,
                               longitude_limits_deg):
    """Plots one example with surrounding context at one time.

    Layer order: radar field; unrelated storms (dashed); related storms with
    the same primary ID (thin solid, labelled "P"); storm of interest (thick
    solid, with forecast probability if available); tornado markers with time
    labels.

    :param storm_object_table: pandas DataFrame, containing only storm objects
        at one time with the relevant primary ID.  Columns are documented in
        `storm_tracking_io.write_file`.
    :param full_id_string: Full ID of storm of interest.
    :param valid_time_unix_sec: Valid time.
    :param tornado_table: pandas DataFrame created by
        `linkage._read_input_tornado_reports`.
    :param top_myrorss_dir_name: See documentation at top of file.
    :param radar_field_name: Same.
    :param radar_height_m_asl: Same.
    :param latitude_limits_deg: See doc for `_get_plotting_limits`.
    :param longitude_limits_deg: Same.
    """

    # Unpack plotting limits as [min, max] pairs.
    min_plot_latitude_deg = latitude_limits_deg[0]
    max_plot_latitude_deg = latitude_limits_deg[1]
    min_plot_longitude_deg = longitude_limits_deg[0]
    max_plot_longitude_deg = longitude_limits_deg[1]

    radar_file_name = myrorss_and_mrms_io.find_raw_file(
        top_directory_name=top_myrorss_dir_name,
        spc_date_string=time_conversion.time_to_spc_date_string(
            valid_time_unix_sec),
        unix_time_sec=valid_time_unix_sec,
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_field_name,
        height_m_asl=radar_height_m_asl,
        raise_error_if_missing=True)

    print('Reading data from: "{0:s}"...'.format(radar_file_name))

    # Read the sparse radar grid and expand it to a full lat-long grid.
    radar_metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
        netcdf_file_name=radar_file_name,
        data_source=radar_utils.MYRORSS_SOURCE_ID)

    sparse_grid_table = (myrorss_and_mrms_io.read_data_from_sparse_grid_file(
        netcdf_file_name=radar_file_name,
        field_name_orig=radar_metadata_dict[
            myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        sentinel_values=radar_metadata_dict[radar_utils.SENTINEL_VALUE_COLUMN])
                         )

    radar_matrix, grid_point_latitudes_deg, grid_point_longitudes_deg = (
        radar_s2f.sparse_to_full_grid(sparse_grid_table=sparse_grid_table,
                                      metadata_dict=radar_metadata_dict))

    # Reverse row order and the latitude array together, so that latitude
    # increases with row index while row/latitude correspondence is preserved.
    radar_matrix = numpy.flip(radar_matrix, axis=0)
    grid_point_latitudes_deg = grid_point_latitudes_deg[::-1]

    # Create map for the requested plotting limits.  [1:] drops the first
    # element returned by `create_equidist_cylindrical_map`.
    axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg,
            resolution_string='i')[1:])

    # Draw map background (coastlines, borders, grid lines) before the data.
    plotting_utils.plot_coastlines(basemap_object=basemap_object,
                                   axes_object=axes_object,
                                   line_colour=BORDER_COLOUR)

    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR)

    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR)

    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS)

    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS)

    # Grid spacing is taken from the first pair of grid points (assumes a
    # regular lat-long grid).
    radar_plotting.plot_latlng_grid(
        field_matrix=radar_matrix,
        field_name=radar_field_name,
        axes_object=axes_object,
        min_grid_point_latitude_deg=numpy.min(grid_point_latitudes_deg),
        min_grid_point_longitude_deg=numpy.min(grid_point_longitudes_deg),
        latitude_spacing_deg=numpy.diff(grid_point_latitudes_deg[:2])[0],
        longitude_spacing_deg=numpy.diff(grid_point_longitudes_deg[:2])[0])

    colour_map_object, colour_norm_object = (
        radar_plotting.get_default_colour_scheme(radar_field_name))

    plotting_utils.plot_colour_bar(axes_object_or_matrix=axes_object,
                                   data_matrix=radar_matrix,
                                   colour_map_object=colour_map_object,
                                   colour_norm_object=colour_norm_object,
                                   orientation_string='horizontal',
                                   extend_min=False,
                                   extend_max=True,
                                   fraction_of_axis_length=0.8)

    # Split the full storm ID into primary and secondary components.
    first_list, second_list = temporal_tracking.full_to_partial_ids(
        [full_id_string])
    primary_id_string = first_list[0]
    secondary_id_string = second_list[0]

    # Plot outlines of unrelated storms (with different primary IDs).
    this_storm_object_table = storm_object_table.loc[storm_object_table[
        tracking_utils.PRIMARY_ID_COLUMN] != primary_id_string]

    storm_plotting.plot_storm_outlines(
        storm_object_table=this_storm_object_table,
        axes_object=axes_object,
        basemap_object=basemap_object,
        line_width=2,
        line_colour='k',
        line_style='dashed')

    # Plot outlines of related storms (with the same primary ID).
    this_storm_object_table = storm_object_table.loc[
        (storm_object_table[tracking_utils.PRIMARY_ID_COLUMN] ==
         primary_id_string) & (storm_object_table[
             tracking_utils.SECONDARY_ID_COLUMN] != secondary_id_string)]

    this_num_storm_objects = len(this_storm_object_table.index)

    if this_num_storm_objects > 0:
        storm_plotting.plot_storm_outlines(
            storm_object_table=this_storm_object_table,
            axes_object=axes_object,
            basemap_object=basemap_object,
            line_width=2,
            line_colour='k',
            line_style='solid')

        # Label each related storm with "P" (same primary ID) at its centroid.
        for j in range(len(this_storm_object_table)):
            axes_object.text(
                this_storm_object_table[
                    tracking_utils.CENTROID_LONGITUDE_COLUMN].values[j],
                this_storm_object_table[
                    tracking_utils.CENTROID_LATITUDE_COLUMN].values[j],
                'P',
                fontsize=FONT_SIZE,
                color=FONT_COLOUR,
                fontweight='bold',
                horizontalalignment='center',
                verticalalignment='center')

    # Plot outline of storm of interest (same secondary ID).
    this_storm_object_table = storm_object_table.loc[storm_object_table[
        tracking_utils.SECONDARY_ID_COLUMN] == secondary_id_string]

    storm_plotting.plot_storm_outlines(
        storm_object_table=this_storm_object_table,
        axes_object=axes_object,
        basemap_object=basemap_object,
        line_width=4,
        line_colour='k',
        line_style='solid')

    this_num_storm_objects = len(this_storm_object_table.index)

    # Plot the forecast probability only if the storm of interest was found
    # and the table has a probability column (`list(df)` yields column names).
    plot_forecast = (this_num_storm_objects > 0 and FORECAST_PROBABILITY_COLUMN
                     in list(this_storm_object_table))

    if plot_forecast:
        this_polygon_object_latlng = this_storm_object_table[
            tracking_utils.LATLNG_POLYGON_COLUMN].values[0]

        # Anchor label at the southernmost latitude of the storm polygon
        # (exterior.xy[1] = latitudes), so it hangs below the outline
        # (verticalalignment='top').
        this_latitude_deg = numpy.min(
            numpy.array(this_polygon_object_latlng.exterior.xy[1]))

        this_longitude_deg = this_storm_object_table[
            tracking_utils.CENTROID_LONGITUDE_COLUMN].values[0]

        label_string = 'Prob = {0:.3f}\nat {1:s}'.format(
            this_storm_object_table[FORECAST_PROBABILITY_COLUMN].values[0],
            time_conversion.unix_sec_to_string(valid_time_unix_sec,
                                               TORNADO_TIME_FORMAT))

        # Background box keeps the label readable over the radar field.
        bounding_box_dict = {
            'facecolor':
            plotting_utils.colour_from_numpy_to_tuple(
                PROBABILITY_BACKGROUND_COLOUR),
            'alpha':
            PROBABILITY_BACKGROUND_OPACITY,
            'edgecolor':
            'k',
            'linewidth':
            1
        }

        # zorder=1e10 forces the label above all other plot elements.
        axes_object.text(this_longitude_deg,
                         this_latitude_deg,
                         label_string,
                         fontsize=FONT_SIZE,
                         color=plotting_utils.colour_from_numpy_to_tuple(
                             PROBABILITY_FONT_COLOUR),
                         fontweight='bold',
                         bbox=bounding_box_dict,
                         horizontalalignment='center',
                         verticalalignment='top',
                         zorder=1e10)

    # Plot all tornado reports with one marker each.
    tornado_latitudes_deg = tornado_table[linkage.EVENT_LATITUDE_COLUMN].values
    tornado_longitudes_deg = tornado_table[
        linkage.EVENT_LONGITUDE_COLUMN].values

    tornado_times_unix_sec = tornado_table[linkage.EVENT_TIME_COLUMN].values
    tornado_time_strings = [
        time_conversion.unix_sec_to_string(t, TORNADO_TIME_FORMAT)
        for t in tornado_times_unix_sec
    ]

    axes_object.plot(tornado_longitudes_deg,
                     tornado_latitudes_deg,
                     linestyle='None',
                     marker=TORNADO_MARKER_TYPE,
                     markersize=TORNADO_MARKER_SIZE,
                     markeredgewidth=TORNADO_MARKER_EDGE_WIDTH,
                     markerfacecolor=plotting_utils.colour_from_numpy_to_tuple(
                         TORNADO_MARKER_COLOUR),
                     markeredgecolor=plotting_utils.colour_from_numpy_to_tuple(
                         TORNADO_MARKER_COLOUR))

    num_tornadoes = len(tornado_latitudes_deg)

    # Label each tornado with its time, offset slightly to the lower right of
    # the marker.
    for j in range(num_tornadoes):
        axes_object.text(tornado_longitudes_deg[j] + 0.02,
                         tornado_latitudes_deg[j] - 0.02,
                         tornado_time_strings[j],
                         fontsize=FONT_SIZE,
                         color=FONT_COLOUR,
                         fontweight='bold',
                         horizontalalignment='left',
                         verticalalignment='top')
Exemplo n.º 10
0
def _plot_one_example_one_time(
        storm_object_table, full_id_string, valid_time_unix_sec,
        tornado_table, top_myrorss_dir_name, radar_field_name,
        radar_height_m_asl, latitude_limits_deg, longitude_limits_deg):
    """Plots one example with surrounding context at one time.

    Layer order: radar field; unrelated storms (dashed); related storms with
    the same primary ID (solid, labelled "P"); storm of interest (thick
    solid).  Forecast probability, if available, goes in the axes title, and
    each tornado is drawn via `_plot_one_tornado`.

    :param storm_object_table: pandas DataFrame, containing only storm objects
        at one time with the relevant primary ID.  Columns are documented in
        `storm_tracking_io.write_file`.
    :param full_id_string: Full ID of storm of interest.
    :param valid_time_unix_sec: Valid time.
    :param tornado_table: pandas DataFrame created by
        `linkage._read_input_tornado_reports`.
    :param top_myrorss_dir_name: See documentation at top of file.
    :param radar_field_name: Same.
    :param radar_height_m_asl: Same.
    :param latitude_limits_deg: See doc for `_get_plotting_limits`.
    :param longitude_limits_deg: Same.
    """

    # Unpack plotting limits as [min, max] pairs.
    min_plot_latitude_deg = latitude_limits_deg[0]
    max_plot_latitude_deg = latitude_limits_deg[1]
    min_plot_longitude_deg = longitude_limits_deg[0]
    max_plot_longitude_deg = longitude_limits_deg[1]

    # Unlike the exact-time finder, this accepts the closest file within the
    # default max time offset for non-shear fields.
    radar_file_name = myrorss_and_mrms_io.find_raw_file_inexact_time(
        top_directory_name=top_myrorss_dir_name,
        desired_time_unix_sec=valid_time_unix_sec,
        spc_date_string=time_conversion.time_to_spc_date_string(
            valid_time_unix_sec),
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_field_name, height_m_asl=radar_height_m_asl,
        max_time_offset_sec=
        myrorss_and_mrms_io.DEFAULT_MAX_TIME_OFFSET_FOR_NON_SHEAR_SEC,
        raise_error_if_missing=True)

    print('Reading data from: "{0:s}"...'.format(radar_file_name))

    # Read the sparse radar grid and expand it to a full lat-long grid.
    radar_metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
        netcdf_file_name=radar_file_name,
        data_source=radar_utils.MYRORSS_SOURCE_ID)

    sparse_grid_table = (
        myrorss_and_mrms_io.read_data_from_sparse_grid_file(
            netcdf_file_name=radar_file_name,
            field_name_orig=radar_metadata_dict[
                myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
            data_source=radar_utils.MYRORSS_SOURCE_ID,
            sentinel_values=radar_metadata_dict[
                radar_utils.SENTINEL_VALUE_COLUMN]
        )
    )

    radar_matrix, grid_point_latitudes_deg, grid_point_longitudes_deg = (
        radar_s2f.sparse_to_full_grid(
            sparse_grid_table=sparse_grid_table,
            metadata_dict=radar_metadata_dict)
    )

    # Reverse row order and the latitude array together, so that latitude
    # increases with row index while row/latitude correspondence is preserved.
    radar_matrix = numpy.flip(radar_matrix, axis=0)
    grid_point_latitudes_deg = grid_point_latitudes_deg[::-1]

    # Create map for the requested plotting limits.  [1:] drops the first
    # element returned by `create_equidist_cylindrical_map`.
    axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg, resolution_string='h'
        )[1:]
    )

    # Draw map background before the data.  line_width=0 hides the
    # parallel/meridian lines while keeping their tick labels.
    plotting_utils.plot_coastlines(
        basemap_object=basemap_object, axes_object=axes_object,
        line_colour=plotting_utils.DEFAULT_COUNTRY_COLOUR)

    plotting_utils.plot_countries(
        basemap_object=basemap_object, axes_object=axes_object)

    plotting_utils.plot_states_and_provinces(
        basemap_object=basemap_object, axes_object=axes_object)

    plotting_utils.plot_parallels(
        basemap_object=basemap_object, axes_object=axes_object,
        num_parallels=NUM_PARALLELS, line_width=0)

    plotting_utils.plot_meridians(
        basemap_object=basemap_object, axes_object=axes_object,
        num_meridians=NUM_MERIDIANS, line_width=0)

    # Grid spacing is taken from the first pair of grid points (assumes a
    # regular lat-long grid).
    radar_plotting.plot_latlng_grid(
        field_matrix=radar_matrix, field_name=radar_field_name,
        axes_object=axes_object,
        min_grid_point_latitude_deg=numpy.min(grid_point_latitudes_deg),
        min_grid_point_longitude_deg=numpy.min(grid_point_longitudes_deg),
        latitude_spacing_deg=numpy.diff(grid_point_latitudes_deg[:2])[0],
        longitude_spacing_deg=numpy.diff(grid_point_longitudes_deg[:2])[0]
    )

    colour_map_object, colour_norm_object = (
        radar_plotting.get_default_colour_scheme(radar_field_name)
    )

    plotting_utils.plot_colour_bar(
        axes_object_or_matrix=axes_object, data_matrix=radar_matrix,
        colour_map_object=colour_map_object,
        colour_norm_object=colour_norm_object, orientation_string='horizontal',
        padding=0.05, extend_min=False, extend_max=True,
        fraction_of_axis_length=0.8)

    # Split the full storm ID into primary and secondary components.
    first_list, second_list = temporal_tracking.full_to_partial_ids(
        [full_id_string]
    )
    primary_id_string = first_list[0]
    secondary_id_string = second_list[0]

    # Plot outlines of unrelated storms (with different primary IDs).
    this_storm_object_table = storm_object_table.loc[
        storm_object_table[tracking_utils.PRIMARY_ID_COLUMN] !=
        primary_id_string
    ]

    storm_plotting.plot_storm_outlines(
        storm_object_table=this_storm_object_table, axes_object=axes_object,
        basemap_object=basemap_object, line_width=AUXILIARY_STORM_WIDTH,
        line_colour='k', line_style='dashed')

    # Plot outlines of related storms (with the same primary ID).
    this_storm_object_table = storm_object_table.loc[
        (storm_object_table[tracking_utils.PRIMARY_ID_COLUMN] ==
         primary_id_string) &
        (storm_object_table[tracking_utils.SECONDARY_ID_COLUMN] !=
         secondary_id_string)
    ]

    this_num_storm_objects = len(this_storm_object_table.index)

    if this_num_storm_objects > 0:
        storm_plotting.plot_storm_outlines(
            storm_object_table=this_storm_object_table, axes_object=axes_object,
            basemap_object=basemap_object, line_width=AUXILIARY_STORM_WIDTH,
            line_colour='k', line_style='solid'
        )

        # Label each related storm with "P" (same primary ID) at its centroid.
        for j in range(len(this_storm_object_table)):
            axes_object.text(
                this_storm_object_table[
                    tracking_utils.CENTROID_LONGITUDE_COLUMN
                ].values[j],
                this_storm_object_table[
                    tracking_utils.CENTROID_LATITUDE_COLUMN
                ].values[j],
                'P',
                fontsize=MAIN_FONT_SIZE, color=FONT_COLOUR, fontweight='bold',
                horizontalalignment='center', verticalalignment='center'
            )

    # Plot outline of storm of interest (same secondary ID).
    this_storm_object_table = storm_object_table.loc[
        storm_object_table[tracking_utils.SECONDARY_ID_COLUMN] ==
        secondary_id_string
    ]

    storm_plotting.plot_storm_outlines(
        storm_object_table=this_storm_object_table, axes_object=axes_object,
        basemap_object=basemap_object, line_width=MAIN_STORM_WIDTH,
        line_colour='k', line_style='solid')

    this_num_storm_objects = len(this_storm_object_table.index)

    # Show the forecast probability only if the storm of interest was found
    # and the table has a probability column (`list(df)` yields column names).
    plot_forecast = (
        this_num_storm_objects > 0 and
        FORECAST_PROBABILITY_COLUMN in list(this_storm_object_table)
    )

    if plot_forecast:
        label_string = 'Prob = {0:.3f}\nat {1:s}'.format(
            this_storm_object_table[FORECAST_PROBABILITY_COLUMN].values[0],
            time_conversion.unix_sec_to_string(
                valid_time_unix_sec, TORNADO_TIME_FORMAT)
        )

        # Probability goes in the title (single line) rather than on the map.
        axes_object.set_title(
            label_string.replace('\n', ' '), fontsize=TITLE_FONT_SIZE
        )

    # Plot each tornado separately: group reports by tornado ID, sort each
    # group by time, and hand the group to `_plot_one_tornado`.
    tornado_id_strings = tornado_table[tornado_io.TORNADO_ID_COLUMN].values

    for this_tornado_id_string in numpy.unique(tornado_id_strings):
        these_rows = numpy.where(
            tornado_id_strings == this_tornado_id_string
        )[0]

        this_tornado_table = tornado_table.iloc[these_rows].sort_values(
            linkage.EVENT_TIME_COLUMN, axis=0, ascending=True, inplace=False
        )
        _plot_one_tornado(
            tornado_table=this_tornado_table, axes_object=axes_object
        )
def _run(top_radar_dir_name, top_echo_classifn_dir_name, valid_time_string,
         min_latitude_deg, max_latitude_deg, min_longitude_deg,
         max_longitude_deg, output_dir_name):
    """Makes figure to explain storm detection.

    This is effectively the main method.  It runs echo-top-based storm
    detection once per experimental setting (each combination of minimum
    polygon size and centroid-recomputation flag), then builds a multi-panel
    figure: panel (a) shows all echoes, panel (b) shows convective echoes
    only, and each remaining panel shows detected storm outlines/centers for
    one experimental setting.

    :param top_radar_dir_name: See documentation at top of file.
    :param top_echo_classifn_dir_name: Same.
    :param valid_time_string: Same.
    :param min_latitude_deg: Same.
    :param max_latitude_deg: Same.
    :param min_longitude_deg: Same.
    :param max_longitude_deg: Same.
    :param output_dir_name: Same.
    """

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name
    )

    valid_time_unix_sec = time_conversion.string_to_unix_sec(
        valid_time_string, TIME_FORMAT
    )
    spc_date_string = time_conversion.time_to_spc_date_string(
        valid_time_unix_sec
    )

    # Run storm detection once per experimental setting.  Each trial writes
    # its tracking output to a distinct subdirectory whose name encodes the
    # settings used.
    num_trials = len(MIN_POLYGON_SIZES_PX)
    tracking_dir_names = [None] * num_trials

    for k in range(num_trials):
        tracking_dir_names[k] = (
            '{0:s}/tracking/min-polygon-size-px={1:d}_recompute-centroids={2:d}'
        ).format(
            output_dir_name, MIN_POLYGON_SIZES_PX[k],
            int(RECOMPUTE_CENTROID_FLAGS[k])
        )

        # Track only the single valid time (the +1 makes the one-second
        # window include `valid_time_unix_sec` itself).
        echo_top_tracking.run_tracking(
            top_radar_dir_name=top_radar_dir_name,
            top_output_dir_name=tracking_dir_names[k],
            first_spc_date_string=spc_date_string,
            last_spc_date_string=spc_date_string,
            first_time_unix_sec=valid_time_unix_sec,
            last_time_unix_sec=valid_time_unix_sec + 1,
            top_echo_classifn_dir_name=top_echo_classifn_dir_name,
            min_polygon_size_pixels=MIN_POLYGON_SIZES_PX[k],
            recompute_centroids=RECOMPUTE_CENTROID_FLAGS[k]
        )
        print(SEPARATOR_STRING)

    # Read the 40-dBZ-echo-top field for the valid time.
    echo_top_file_name = myrorss_and_mrms_io.find_raw_file(
        top_directory_name=top_radar_dir_name,
        unix_time_sec=valid_time_unix_sec, spc_date_string=spc_date_string,
        field_name=radar_utils.ECHO_TOP_40DBZ_NAME,
        data_source=radar_utils.MYRORSS_SOURCE_ID, raise_error_if_missing=True
    )

    print('Reading data from: "{0:s}"...'.format(echo_top_file_name))

    metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
        netcdf_file_name=echo_top_file_name,
        data_source=radar_utils.MYRORSS_SOURCE_ID
    )

    sparse_grid_table = myrorss_and_mrms_io.read_data_from_sparse_grid_file(
        netcdf_file_name=echo_top_file_name,
        field_name_orig=metadata_dict[
            myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        sentinel_values=metadata_dict[radar_utils.SENTINEL_VALUE_COLUMN]
    )

    echo_top_matrix_km_asl, radar_latitudes_deg, radar_longitudes_deg = (
        radar_s2f.sparse_to_full_grid(
            sparse_grid_table=sparse_grid_table, metadata_dict=metadata_dict)
    )

    # Reverse the row axis of the grid and the latitude array together --
    # presumably so that latitude increases with row index, as the plotting
    # code expects.  TODO(review): confirm against grid convention.
    echo_top_matrix_km_asl = numpy.flip(echo_top_matrix_km_asl, axis=0)
    radar_latitudes_deg = radar_latitudes_deg[::-1]

    # Read convective/non-convective echo classifications for the same time.
    echo_classifn_file_name = echo_classifn.find_classification_file(
        top_directory_name=top_echo_classifn_dir_name,
        valid_time_unix_sec=valid_time_unix_sec,
        desire_zipped=True, allow_zipped_or_unzipped=True,
        raise_error_if_missing=True
    )

    print('Reading data from: "{0:s}"...'.format(echo_classifn_file_name))
    convective_flag_matrix = echo_classifn.read_classifications(
        echo_classifn_file_name
    )[0]

    # Crop grid rows to the latitude limits given on the command line.
    good_indices = numpy.where(numpy.logical_and(
        radar_latitudes_deg >= min_latitude_deg,
        radar_latitudes_deg <= max_latitude_deg
    ))[0]

    echo_top_matrix_km_asl = echo_top_matrix_km_asl[good_indices, ...]
    convective_flag_matrix = convective_flag_matrix[good_indices, ...]
    radar_latitudes_deg = radar_latitudes_deg[good_indices]

    # Crop grid columns to the longitude limits.
    good_indices = numpy.where(numpy.logical_and(
        radar_longitudes_deg >= min_longitude_deg,
        radar_longitudes_deg <= max_longitude_deg
    ))[0]

    echo_top_matrix_km_asl = echo_top_matrix_km_asl[..., good_indices]
    convective_flag_matrix = convective_flag_matrix[..., good_indices]
    radar_longitudes_deg = radar_longitudes_deg[good_indices]

    # Panel (a): echo tops with no convective mask applied.
    this_figure_object, this_axes_object = _plot_echo_tops(
        echo_top_matrix_km_asl=echo_top_matrix_km_asl,
        latitudes_deg=radar_latitudes_deg, longitudes_deg=radar_longitudes_deg,
        plot_colour_bar=False, convective_flag_matrix=None
    )[:2]

    this_axes_object.set_title('All echoes')
    plotting_utils.label_axes(axes_object=this_axes_object, label_string='(a)')

    panel_file_names = [
        '{0:s}/before_echo_classification.jpg'.format(output_dir_name)
    ]

    print('Saving figure to: "{0:s}"...'.format(panel_file_names[-1]))
    this_figure_object.savefig(
        panel_file_names[-1], dpi=FIGURE_RESOLUTION_DPI,
        pad_inches=0, bbox_inches='tight'
    )
    pyplot.close(this_figure_object)

    # Panel (b): echo tops masked to convective echoes only.
    this_figure_object, this_axes_object = _plot_echo_tops(
        echo_top_matrix_km_asl=echo_top_matrix_km_asl,
        latitudes_deg=radar_latitudes_deg, longitudes_deg=radar_longitudes_deg,
        plot_colour_bar=False, convective_flag_matrix=convective_flag_matrix
    )[:2]

    this_axes_object.set_title('Convective echoes only')
    plotting_utils.label_axes(axes_object=this_axes_object, label_string='(b)')

    panel_file_names.append(
        '{0:s}/after_echo_classification.jpg'.format(output_dir_name)
    )

    print('Saving figure to: "{0:s}"...'.format(panel_file_names[-1]))
    this_figure_object.savefig(
        panel_file_names[-1], dpi=FIGURE_RESOLUTION_DPI,
        pad_inches=0, bbox_inches='tight'
    )
    pyplot.close(this_figure_object)

    # Remaining panels: one per experimental setting, labelled (c), (d), ...
    # Starting at 'b' because the label is incremented before first use.
    letter_label = 'b'

    for k in range(num_trials):
        this_tracking_file_name = tracking_io.find_file(
            top_tracking_dir_name=tracking_dir_names[k],
            tracking_scale_metres2=
            echo_top_tracking.DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            valid_time_unix_sec=valid_time_unix_sec,
            spc_date_string=spc_date_string,
            raise_error_if_missing=True
        )

        print('Reading data from: "{0:s}"...'.format(this_tracking_file_name))
        this_storm_object_table = tracking_io.read_file(this_tracking_file_name)

        # Colour bar only on trials after the first (`k > 0`); panel layout
        # presumably leaves room for it there.  TODO(review): confirm intent.
        this_figure_object, this_axes_object, this_basemap_object = (
            _plot_echo_tops(
                echo_top_matrix_km_asl=echo_top_matrix_km_asl,
                latitudes_deg=radar_latitudes_deg,
                longitudes_deg=radar_longitudes_deg, plot_colour_bar=k > 0,
                convective_flag_matrix=convective_flag_matrix)
        )

        storm_plotting.plot_storm_outlines(
            storm_object_table=this_storm_object_table,
            axes_object=this_axes_object, basemap_object=this_basemap_object,
            line_width=POLYGON_WIDTH, line_colour=POLYGON_COLOUR
        )

        # Project storm-centroid lat/long coordinates into the basemap's
        # x-y space before plotting the centre markers.
        these_x_metres, these_y_metres = this_basemap_object(
            this_storm_object_table[
                tracking_utils.CENTROID_LONGITUDE_COLUMN].values,
            this_storm_object_table[
                tracking_utils.CENTROID_LATITUDE_COLUMN].values
        )

        this_axes_object.plot(
            these_x_metres, these_y_metres, linestyle='None',
            marker=MARKER_TYPE, markersize=MARKER_SIZE,
            markerfacecolor=MARKER_COLOUR, markeredgecolor=MARKER_COLOUR,
            markeredgewidth=MARKER_EDGE_WIDTH
        )

        this_title_string = (
            'Minimum size = {0:d} GP, {1:s} storm centers'
        ).format(
            MIN_POLYGON_SIZES_PX[k],
            'recomputed' if RECOMPUTE_CENTROID_FLAGS[k] else 'original'
        )

        this_axes_object.set_title(this_title_string)

        letter_label = chr(ord(letter_label) + 1)
        plotting_utils.label_axes(
            axes_object=this_axes_object,
            label_string='({0:s})'.format(letter_label)
        )

        panel_file_names.append(
            '{0:s}/detection{1:d}.jpg'.format(output_dir_name, k)
        )

        print('Saving figure to: "{0:s}"...'.format(panel_file_names[-1]))
        this_figure_object.savefig(
            panel_file_names[-1], dpi=FIGURE_RESOLUTION_DPI,
            pad_inches=0, bbox_inches='tight'
        )
        pyplot.close(this_figure_object)

    # Concatenate all panels into one figure.  NOTE(review): the 3x2 grid is
    # hard-coded, which assumes num_trials == 4 (2 + 4 = 6 panels) -- verify
    # against MIN_POLYGON_SIZES_PX if the trial list ever changes.
    concat_file_name = '{0:s}/storm_detection.jpg'.format(output_dir_name)
    print('Concatenating panels to: "{0:s}"...'.format(concat_file_name))

    imagemagick_utils.concatenate_images(
        input_file_names=panel_file_names, output_file_name=concat_file_name,
        num_panel_rows=3, num_panel_columns=2
    )