Example #1
def _run(top_tracking_dir_name, first_spc_date_string, last_spc_date_string,
         top_myrorss_dir_name, radar_field_name, output_file_name):
    """Evaluates a set of storm tracks.

    This is effectively the main method.

    :param top_tracking_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param top_myrorss_dir_name: Same.
    :param radar_field_name: Same.
    :param output_file_name: Same.
    """

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    list_of_storm_object_tables = []

    for this_spc_date_string in spc_date_strings:
        these_file_names = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=(
                echo_top_tracking.DUMMY_TRACKING_SCALE_METRES2),
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

        if len(these_file_names) == 0:
            continue

        this_storm_object_table = tracking_io.read_many_files(
            these_file_names)[STORM_OBJECT_COLUMNS]

        list_of_storm_object_tables.append(this_storm_object_table)

        if this_spc_date_string != spc_date_strings[-1]:
            print(MINOR_SEPARATOR_STRING)

        if len(list_of_storm_object_tables) == 1:
            continue

        list_of_storm_object_tables[-1] = (
            list_of_storm_object_tables[-1].align(
                list_of_storm_object_tables[0], axis=1)[0]
        )

    print(SEPARATOR_STRING)

    storm_object_table = pandas.concat(list_of_storm_object_tables,
                                       axis=0,
                                       ignore_index=True)

    evaluation_dict = tracking_eval.evaluate_tracks(
        storm_object_table=storm_object_table,
        top_myrorss_dir_name=top_myrorss_dir_name,
        radar_field_name=radar_field_name)

    print('Writing results to: "{0:s}"...'.format(output_file_name))
    tracking_eval.write_file(evaluation_dict=evaluation_dict,
                             pickle_file_name=output_file_name)
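
The examples on this page index tracking_io.find_files_one_spc_date with [0]
because the function returns a pair: the list of matching file paths and the
glob pattern used to find them (the unit test below checks the second
element). A minimal sketch of that contract, using a hypothetical directory
layout rather than the library's real one:

import glob
import os


def find_files_one_spc_date_sketch(top_tracking_dir_name, spc_date_string):
    """Sketch of the (file_names, glob_pattern) contract assumed above."""

    glob_pattern = os.path.join(
        top_tracking_dir_name, spc_date_string, 'storm-tracking*.p')
    file_names = glob.glob(glob_pattern)
    return file_names, glob_pattern


# Callers that need only the file list index the returned tuple with [0]:
these_file_names = find_files_one_spc_date_sketch('tracking', '20110427')[0]
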
    def test_find_files_one_spc_date(self):
        """Ensures correct output from find_files_one_spc_date."""

        _, this_glob_pattern = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=TOP_SEGMOTION_DIR_NAME,
            tracking_scale_metres2=TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=VALID_SPC_DATE_STRING,
            raise_error_if_missing=False)

        self.assertTrue(this_glob_pattern == GLOB_PATTERN_FOR_SPC_DATE)
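
The test method above is shown out of context: it uses self, so it only runs
inside a unittest.TestCase subclass with the module-level constants defined.
A minimal, self-contained sketch of that pattern, with a dummy finder and
dummy constants standing in for the real ones:

import unittest

EXPECTED_GLOB_PATTERN = 'tracking/20180123/*.p'  # Dummy stand-in.


def find_files_one_spc_date_dummy():
    """Dummy stand-in returning the (file_names, glob_pattern) pair."""
    return [], 'tracking/20180123/*.p'


class TrackingIoTests(unittest.TestCase):
    """Minimal harness mirroring the test method shown above."""

    def test_find_files_one_spc_date(self):
        """Ensures correct output from find_files_one_spc_date_dummy."""

        _, this_glob_pattern = find_files_one_spc_date_dummy()
        self.assertTrue(this_glob_pattern == EXPECTED_GLOB_PATTERN)


if __name__ == '__main__':
    unittest.main()
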
def _run(tornado_dir_name, top_tracking_dir_name, tracking_scale_metres2,
         genesis_only, first_spc_date_string, last_spc_date_string,
         top_output_dir_name):
    """Runs `linkage.link_tornadoes_to_storms`.

    This is effectively the main method.

    :param tornado_dir_name: See documentation at top of file.
    :param top_tracking_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param genesis_only: Same.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param top_output_dir_name: Same.
    """

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    tracking_file_names = []

    for this_spc_date_string in spc_date_strings:
        these_file_names = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=tracking_scale_metres2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

        if len(these_file_names) == 0:
            if len(tracking_file_names) > 0:
                _link_tornadoes_one_period(
                    tracking_file_names=tracking_file_names,
                    tornado_dir_name=tornado_dir_name,
                    genesis_only=genesis_only,
                    top_output_dir_name=top_output_dir_name)

                print(SEPARATOR_STRING)
                tracking_file_names = []

            continue

        tracking_file_names += these_file_names

    _link_tornadoes_one_period(tracking_file_names=tracking_file_names,
                               tornado_dir_name=tornado_dir_name,
                               genesis_only=genesis_only,
                               top_output_dir_name=top_output_dir_name)
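
The loop above batches tracking files into contiguous periods: whenever an SPC
date yields no files, the accumulated batch is flushed to
_link_tornadoes_one_period and reset, and the call after the loop flushes the
final batch. The same flush pattern in isolation:

def split_into_periods(items_by_day):
    """Splits a day-indexed sequence into contiguous non-empty periods.

    :param items_by_day: List of lists, where an empty inner list marks a gap.
    :return: periods: List of periods, each a flattened list of items.
    """

    periods = []
    current_period = []

    for these_items in items_by_day:
        if len(these_items) == 0:
            if len(current_period) > 0:
                periods.append(current_period)  # Flush at each gap.
                current_period = []

            continue

        current_period += these_items

    if len(current_period) > 0:  # Final flush after the loop.
        periods.append(current_period)

    return periods


print(split_into_periods([['a'], ['b', 'c'], [], ['d']]))
# [['a', 'b', 'c'], ['d']]
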
def _compute_radar_stats_from_gridrad(spc_date_string, top_tracking_dir_name,
                                      tracking_scale_metres2,
                                      top_gridrad_dir_name, output_dir_name):
    """Uses GridRad data to compute radar statistics for each storm object.

    :param spc_date_string: SPC (Storm Prediction Center) date in format
        "yyyymmdd".  Radar stats will be computed for all storm objects on this
        date.
    :param top_tracking_dir_name: Name of top-level directory with storm-
        tracking files.  Storm objects will be read from here.
    :param tracking_scale_metres2: Tracking scale (minimum storm area).  Will be
        used to find tracking files.
    :param top_gridrad_dir_name: Name of top-level directory with GridRad files.
    :param output_dir_name: Name of output directory.  A single Pickle file,
        with radar stats for each storm object, will be written here.
    """

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    storm_object_statistic_table = (
        radar_statistics.get_storm_based_radar_stats_gridrad(
            storm_object_table=storm_object_table,
            top_radar_dir_name=top_gridrad_dir_name))
    print(SEPARATOR_STRING)

    output_file_name = '{0:s}/radar_stats_for_storm_objects_{1:s}.p'.format(
        output_dir_name, spc_date_string)

    print('Writing radar statistics to file: "{0:s}"...'.format(
        output_file_name))

    radar_statistics.write_stats_for_storm_objects(
        storm_object_statistic_table, output_file_name)
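
file_system_utils.mkdir_recursive_if_necessary appears throughout these
examples with either a directory_name or a file_name argument. A minimal
sketch of what such a helper does, assuming it simply wraps os.makedirs (an
assumption about the helper, not its documented behaviour):

import os


def mkdir_recursive_if_necessary_sketch(directory_name=None, file_name=None):
    """Creates a directory (or a file's parent directory) if missing."""

    if directory_name is None:
        directory_name = os.path.dirname(file_name)
    if directory_name:
        os.makedirs(directory_name, exist_ok=True)


# Both call styles seen in these examples:
mkdir_recursive_if_necessary_sketch(directory_name='stats_output')
mkdir_recursive_if_necessary_sketch(file_name='figures/tracks.jpg')
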
def _compute_shape_stats(spc_date_string, top_tracking_dir_name,
                         tracking_scale_metres2, output_dir_name):
    """Computes shape statistics for each storm object.

    :param spc_date_string: SPC (Storm Prediction Center) date in format
        "yyyymmdd".  Shape statistics will be computed for all storm objects on
        this date.
    :param top_tracking_dir_name: Name of top-level directory with storm-
        tracking data.
    :param tracking_scale_metres2: Tracking scale (minimum storm area).  Will be
        used to find input data.
    :param output_dir_name: Name of output directory.  A single Pickle file,
        with shape statistics for each storm object, will be written here.
    """

    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    shape_statistic_table = shape_stats.get_stats_for_storm_objects(
        storm_object_table)
    print(SEPARATOR_STRING)

    shape_statistic_file_name = '{0:s}/shape_statistics_{1:s}.p'.format(
        output_dir_name, spc_date_string)

    print('Writing shape statistics to: "{0:s}"...'.format(
        shape_statistic_file_name))

    shape_stats.write_stats_for_storm_objects(shape_statistic_table,
                                              shape_statistic_file_name)
Example #6
def _run(top_tracking_dir_name, first_spc_date_string, last_spc_date_string,
         colour_map_name, min_plot_latitude_deg, max_plot_latitude_deg,
         min_plot_longitude_deg, max_plot_longitude_deg, output_file_name):
    """Plots storm tracks for a continuous time period.

    This is effectively the main method.

    :param top_tracking_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param colour_map_name: Same.
    :param min_plot_latitude_deg: Same.
    :param max_plot_latitude_deg: Same.
    :param min_plot_longitude_deg: Same.
    :param max_plot_longitude_deg: Same.
    :param output_file_name: Same.
    """

    if colour_map_name in ['', 'None']:
        colour_map_object = 'random'
    else:
        colour_map_object = pyplot.cm.get_cmap(colour_map_name)

    if min_plot_latitude_deg <= SENTINEL_VALUE:
        min_plot_latitude_deg = None
    if max_plot_latitude_deg <= SENTINEL_VALUE:
        max_plot_latitude_deg = None
    if min_plot_longitude_deg <= SENTINEL_VALUE:
        min_plot_longitude_deg = None
    if max_plot_longitude_deg <= SENTINEL_VALUE:
        max_plot_longitude_deg = None

    file_system_utils.mkdir_recursive_if_necessary(file_name=output_file_name)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    list_of_storm_object_tables = []

    for this_spc_date_string in spc_date_strings:
        these_file_names = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=(
                echo_top_tracking.DUMMY_TRACKING_SCALE_METRES2),
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

        if len(these_file_names) == 0:
            continue

        this_storm_object_table = tracking_io.read_many_files(
            these_file_names)[REQUIRED_COLUMNS]

        list_of_storm_object_tables.append(this_storm_object_table)

        if this_spc_date_string != spc_date_strings[-1]:
            print(MINOR_SEPARATOR_STRING)

        if len(list_of_storm_object_tables) == 1:
            continue

        list_of_storm_object_tables[-1] = (
            list_of_storm_object_tables[-1].align(
                list_of_storm_object_tables[0], axis=1)[0]
        )

    print(SEPARATOR_STRING)
    storm_object_table = pandas.concat(list_of_storm_object_tables,
                                       axis=0,
                                       ignore_index=True)

    if min_plot_latitude_deg is None:
        min_plot_latitude_deg = numpy.min(
            storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values
        ) - LATLNG_BUFFER_DEG

    if max_plot_latitude_deg is None:
        max_plot_latitude_deg = numpy.max(
            storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values
        ) + LATLNG_BUFFER_DEG

    if min_plot_longitude_deg is None:
        min_plot_longitude_deg = numpy.min(
            storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values
        ) - LATLNG_BUFFER_DEG

    if max_plot_longitude_deg is None:
        max_plot_longitude_deg = numpy.max(
            storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values
        ) + LATLNG_BUFFER_DEG

    _, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg,
            resolution_string='i'))

    plotting_utils.plot_coastlines(basemap_object=basemap_object,
                                   axes_object=axes_object,
                                   line_colour=BORDER_COLOUR)

    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR)

    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR)

    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS)

    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS)

    storm_plotting.plot_storm_tracks(storm_object_table=storm_object_table,
                                     axes_object=axes_object,
                                     basemap_object=basemap_object,
                                     colour_map_object=colour_map_object)

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    pyplot.savefig(output_file_name, dpi=FIGURE_RESOLUTION_DPI)
    pyplot.close()
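
The align-then-concat idiom in the loop above guarantees that every per-date
table has the same column layout before the tables are stacked. A
self-contained demonstration with illustrative column names:

import pandas

table_a = pandas.DataFrame(
    {'full_id_string': ['A'], 'centroid_latitude_deg': [35.]})
table_b = pandas.DataFrame(
    {'centroid_latitude_deg': [36.], 'full_id_string': ['B']})

# align(..., axis=1) returns both tables with matching column layout;
# [0] keeps only the realigned left-hand table.
table_b = table_b.align(table_a, axis=1)[0]

storm_object_table = pandas.concat(
    [table_a, table_b], axis=0, ignore_index=True)
print(storm_object_table)
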
def _run(top_tracking_dir_name, first_spc_date_string, last_spc_date_string,
         colour_map_name, min_plot_latitude_deg, max_plot_latitude_deg,
         min_plot_longitude_deg, max_plot_longitude_deg, output_file_name):
    """Plots storm tracks for a continuous time period.

    This is effectively the main method.

    :param top_tracking_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param colour_map_name: Same.
    :param min_plot_latitude_deg: Same.
    :param max_plot_latitude_deg: Same.
    :param min_plot_longitude_deg: Same.
    :param max_plot_longitude_deg: Same.
    :param output_file_name: Same.
    """

    if colour_map_name in ['', 'None']:
        colour_map_object = 'random'
    else:
        colour_map_object = pyplot.cm.get_cmap(colour_map_name)

    if min_plot_latitude_deg <= SENTINEL_VALUE:
        min_plot_latitude_deg = None
    if max_plot_latitude_deg <= SENTINEL_VALUE:
        max_plot_latitude_deg = None
    if min_plot_longitude_deg <= SENTINEL_VALUE:
        min_plot_longitude_deg = None
    if max_plot_longitude_deg <= SENTINEL_VALUE:
        max_plot_longitude_deg = None

    file_system_utils.mkdir_recursive_if_necessary(file_name=output_file_name)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    list_of_storm_object_tables = []

    for this_spc_date_string in spc_date_strings:
        these_file_names = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=(
                echo_top_tracking.DUMMY_TRACKING_SCALE_METRES2),
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

        if len(these_file_names) == 0:
            continue

        this_storm_object_table = tracking_io.read_many_files(
            these_file_names)[REQUIRED_COLUMNS]

        list_of_storm_object_tables.append(this_storm_object_table)

        if this_spc_date_string != spc_date_strings[-1]:
            print(MINOR_SEPARATOR_STRING)

        if len(list_of_storm_object_tables) == 1:
            continue

        list_of_storm_object_tables[-1] = (
            list_of_storm_object_tables[-1].align(
                list_of_storm_object_tables[0], axis=1)[0]
        )

    print(SEPARATOR_STRING)
    storm_object_table = pandas.concat(list_of_storm_object_tables,
                                       axis=0,
                                       ignore_index=True)

    # TODO(thunderhoser): HACK
    first_time_unix_sec = time_conversion.string_to_unix_sec(
        '2011-04-27-20', '%Y-%m-%d-%H')
    storm_object_table = storm_object_table.loc[storm_object_table[
        tracking_utils.VALID_TIME_COLUMN] >= first_time_unix_sec]

    if min_plot_latitude_deg is None:
        min_plot_latitude_deg = numpy.min(
            storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values
        ) - LATLNG_BUFFER_DEG

    if max_plot_latitude_deg is None:
        max_plot_latitude_deg = numpy.max(
            storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values
        ) + LATLNG_BUFFER_DEG

    if min_plot_longitude_deg is None:
        min_plot_longitude_deg = numpy.min(
            storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values
        ) - LATLNG_BUFFER_DEG

    if max_plot_longitude_deg is None:
        max_plot_longitude_deg = numpy.max(
            storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values
        ) + LATLNG_BUFFER_DEG

    _, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg,
            resolution_string='i'))

    # plotting_utils.plot_coastlines(
    #     basemap_object=basemap_object, axes_object=axes_object,
    #     line_colour=BORDER_COLOUR
    # )
    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR)
    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR)
    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS,
                                  line_colour=numpy.full(3, 1.))
    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS,
                                  line_colour=numpy.full(3, 1.))

    colour_bar_object = storm_plotting.plot_storm_tracks(
        storm_object_table=storm_object_table,
        axes_object=axes_object,
        basemap_object=basemap_object,
        colour_map_object=colour_map_object)

    valid_times_unix_sec = (
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)

    # TODO(thunderhoser): HACK
    tick_times_unix_sec = time_periods.range_and_interval_to_list(
        start_time_unix_sec=numpy.min(valid_times_unix_sec),
        end_time_unix_sec=numpy.max(valid_times_unix_sec),
        time_interval_sec=1800,
        include_endpoint=True)
    tick_time_strings = [
        time_conversion.unix_sec_to_string(t, COLOUR_BAR_TIME_FORMAT)
        for t in tick_times_unix_sec
    ]

    colour_bar_object.set_ticks(tick_times_unix_sec)
    colour_bar_object.set_ticklabels(tick_time_strings)

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    pyplot.savefig(output_file_name,
                   dpi=FIGURE_RESOLUTION_DPI,
                   pad_inches=0,
                   bbox_inches='tight')
    pyplot.close()
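
The colour-bar tick relabelling above (set_ticks with epoch times,
set_ticklabels with formatted strings) works on any matplotlib colour bar. A
self-contained sketch, with numpy.arange standing in for
time_periods.range_and_interval_to_list:

import matplotlib
matplotlib.use('agg')  # Non-interactive backend.

import time
import numpy
from matplotlib import pyplot

# Every 1800 s from 0000 to 0100 UTC, endpoint included.
tick_times_unix_sec = numpy.arange(0, 3600 + 1800, 1800, dtype=int)
tick_time_strings = [
    time.strftime('%H%M', time.gmtime(t)) for t in tick_times_unix_sec
]

figure_object, axes_object = pyplot.subplots()
scalar_mappable = pyplot.cm.ScalarMappable(cmap=pyplot.cm.viridis)
scalar_mappable.set_array(tick_times_unix_sec.astype(float))

colour_bar_object = figure_object.colorbar(scalar_mappable, ax=axes_object)
colour_bar_object.set_ticks(tick_times_unix_sec)
colour_bar_object.set_ticklabels(tick_time_strings)

pyplot.savefig('colour_bar_demo.jpg', dpi=300, pad_inches=0,
               bbox_inches='tight')
pyplot.close()
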
Example #8
def _extract_storm_images(
        num_image_rows, num_image_columns, rotate_grids,
        rotated_grid_spacing_metres, radar_field_names, radar_heights_m_agl,
        spc_date_string, top_radar_dir_name, top_tracking_dir_name,
        elevation_dir_name, tracking_scale_metres2, target_name,
        top_target_dir_name, top_output_dir_name):
    """Extracts storm-centered radar images from GridRad data.

    :param num_image_rows: See documentation at top of file.
    :param num_image_columns: Same.
    :param rotate_grids: Same.
    :param rotated_grid_spacing_metres: Same.
    :param radar_field_names: Same.
    :param radar_heights_m_agl: Same.
    :param spc_date_string: Same.
    :param top_radar_dir_name: Same.
    :param top_tracking_dir_name: Same.
    :param elevation_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param target_name: Same.
    :param top_target_dir_name: Same.
    :param top_output_dir_name: Same.
    """

    if target_name in ['', 'None']:
        target_name = None

    if target_name is not None:
        target_param_dict = target_val_utils.target_name_to_params(target_name)

        target_file_name = target_val_utils.find_target_file(
            top_directory_name=top_target_dir_name,
            event_type_string=target_param_dict[
                target_val_utils.EVENT_TYPE_KEY],
            spc_date_string=spc_date_string)

        print('Reading data from: "{0:s}"...'.format(target_file_name))
        target_dict = target_val_utils.read_target_values(
            netcdf_file_name=target_file_name, target_names=[target_name]
        )

        print('\n')

    # Find storm objects on the given SPC date.
    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2
    )[0]

    # Read storm objects on the given SPC date.
    storm_object_table = tracking_io.read_many_files(
        tracking_file_names
    )[storm_images.STORM_COLUMNS_NEEDED]

    print(SEPARATOR_STRING)

    if target_name is not None:
        print((
            'Removing storm objects without target values (variable = '
            '"{0:s}")...'
        ).format(target_name))

        these_indices = tracking_utils.find_storm_objects(
            all_id_strings=storm_object_table[
                tracking_utils.FULL_ID_COLUMN].values.tolist(),
            all_times_unix_sec=storm_object_table[
                tracking_utils.VALID_TIME_COLUMN].values.astype(int),
            id_strings_to_keep=target_dict[target_val_utils.FULL_IDS_KEY],
            times_to_keep_unix_sec=target_dict[
                target_val_utils.VALID_TIMES_KEY],
            allow_missing=False)

        num_storm_objects_orig = len(storm_object_table.index)
        storm_object_table = storm_object_table.iloc[these_indices]
        num_storm_objects = len(storm_object_table.index)

        print('Removed {0:d} of {1:d} storm objects!\n'.format(
            num_storm_objects_orig - num_storm_objects, num_storm_objects_orig
        ))

    # Extract storm-centered radar images.
    storm_images.extract_storm_images_gridrad(
        storm_object_table=storm_object_table,
        top_radar_dir_name=top_radar_dir_name,
        top_output_dir_name=top_output_dir_name,
        elevation_dir_name=elevation_dir_name,
        num_storm_image_rows=num_image_rows,
        num_storm_image_columns=num_image_columns, rotate_grids=rotate_grids,
        rotated_grid_spacing_metres=rotated_grid_spacing_metres,
        radar_field_names=radar_field_names,
        radar_heights_m_agl=radar_heights_m_agl)
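
tracking_utils.find_storm_objects returns, for each (ID, time) pair to keep,
its index into the full ID/time arrays, which is why
storm_object_table.iloc[these_indices] filters the table. A minimal
dictionary-based sketch of that matching (not the library's implementation),
assuming each (ID, time) pair is unique:

import numpy


def find_storm_objects_sketch(
        all_id_strings, all_times_unix_sec, id_strings_to_keep,
        times_to_keep_unix_sec, allow_missing=False):
    """Finds the index of each desired storm object in the full arrays."""

    index_by_key = {
        (s, t): k
        for k, (s, t) in enumerate(zip(all_id_strings, all_times_unix_sec))
    }

    good_indices = []

    for this_key in zip(id_strings_to_keep, times_to_keep_unix_sec):
        if this_key not in index_by_key:
            if allow_missing:
                continue
            raise ValueError(
                'Cannot find storm object: {0:s}'.format(str(this_key)))

        good_indices.append(index_by_key[this_key])

    return numpy.array(good_indices, dtype=int)


print(find_storm_objects_sketch(
    all_id_strings=['a', 'b', 'c'], all_times_unix_sec=[0, 0, 300],
    id_strings_to_keep=['c', 'a'], times_to_keep_unix_sec=[300, 0]))
# [2 0]
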
def _run(top_tracking_dir_name, first_spc_date_string, last_spc_date_string,
         storm_colour, storm_opacity, include_secondary_ids,
         min_plot_latitude_deg, max_plot_latitude_deg, min_plot_longitude_deg,
         max_plot_longitude_deg, top_myrorss_dir_name, radar_field_name,
         radar_height_m_asl, output_dir_name):
    """Plots storm outlines (along with IDs) at each time step.

    This is effectively the main method.

    :param top_tracking_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param storm_colour: Same.
    :param storm_opacity: Same.
    :param include_secondary_ids: Same.
    :param min_plot_latitude_deg: Same.
    :param max_plot_latitude_deg: Same.
    :param min_plot_longitude_deg: Same.
    :param max_plot_longitude_deg: Same.
    :param top_myrorss_dir_name: Same.
    :param radar_field_name: Same.
    :param radar_height_m_asl: Same.
    :param output_dir_name: Same.
    """

    if top_myrorss_dir_name in ['', 'None']:
        top_myrorss_dir_name = None

    if radar_field_name != radar_utils.REFL_NAME:
        radar_height_m_asl = None

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    tracking_file_names = []

    for this_spc_date_string in spc_date_strings:
        tracking_file_names += (tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=DUMMY_SOURCE_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0])

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    latitude_limits_deg, longitude_limits_deg = _get_plotting_limits(
        min_plot_latitude_deg=min_plot_latitude_deg,
        max_plot_latitude_deg=max_plot_latitude_deg,
        min_plot_longitude_deg=min_plot_longitude_deg,
        max_plot_longitude_deg=max_plot_longitude_deg,
        storm_object_table=storm_object_table)

    min_plot_latitude_deg = latitude_limits_deg[0]
    max_plot_latitude_deg = latitude_limits_deg[1]
    min_plot_longitude_deg = longitude_limits_deg[0]
    max_plot_longitude_deg = longitude_limits_deg[1]

    valid_times_unix_sec = numpy.unique(
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)
    num_times = len(valid_times_unix_sec)

    for i in range(num_times):
        these_current_rows = numpy.where(
            storm_object_table[tracking_utils.VALID_TIME_COLUMN].values ==
            valid_times_unix_sec[i])[0]

        these_current_subrows = _filter_storm_objects_latlng(
            storm_object_table=storm_object_table.iloc[these_current_rows],
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg)

        if len(these_current_subrows) == 0:
            continue

        these_current_rows = these_current_rows[these_current_subrows]

        this_storm_object_table = _find_relevant_storm_objects(
            storm_object_table=storm_object_table,
            current_rows=these_current_rows)

        these_latlng_rows = _filter_storm_objects_latlng(
            storm_object_table=this_storm_object_table,
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg)

        if top_myrorss_dir_name is None:
            this_radar_matrix = None
            these_radar_latitudes_deg = None
            these_radar_longitudes_deg = None
        else:
            this_myrorss_file_name = myrorss_and_mrms_io.find_raw_file(
                top_directory_name=top_myrorss_dir_name,
                unix_time_sec=valid_times_unix_sec[i],
                spc_date_string=time_conversion.time_to_spc_date_string(
                    valid_times_unix_sec[i]),
                field_name=radar_field_name,
                data_source=radar_utils.MYRORSS_SOURCE_ID,
                height_m_asl=radar_height_m_asl,
                raise_error_if_missing=True)

            print(
                'Reading data from: "{0:s}"...'.format(this_myrorss_file_name))

            this_metadata_dict = (
                myrorss_and_mrms_io.read_metadata_from_raw_file(
                    netcdf_file_name=this_myrorss_file_name,
                    data_source=radar_utils.MYRORSS_SOURCE_ID))

            this_sparse_grid_table = (
                myrorss_and_mrms_io.read_data_from_sparse_grid_file(
                    netcdf_file_name=this_myrorss_file_name,
                    field_name_orig=this_metadata_dict[
                        myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
                    data_source=radar_utils.MYRORSS_SOURCE_ID,
                    sentinel_values=this_metadata_dict[
                        radar_utils.SENTINEL_VALUE_COLUMN]))

            (this_radar_matrix, these_radar_latitudes_deg,
             these_radar_longitudes_deg) = radar_s2f.sparse_to_full_grid(
                 sparse_grid_table=this_sparse_grid_table,
                 metadata_dict=this_metadata_dict)

            this_radar_matrix = numpy.flipud(this_radar_matrix)
            these_radar_latitudes_deg = these_radar_latitudes_deg[::-1]

        _, this_axes_object, this_basemap_object = (
            plotting_utils.create_equidist_cylindrical_map(
                min_latitude_deg=min_plot_latitude_deg,
                max_latitude_deg=max_plot_latitude_deg,
                min_longitude_deg=min_plot_longitude_deg,
                max_longitude_deg=max_plot_longitude_deg,
                resolution_string='i'))

        _plot_storm_outlines_one_time(
            storm_object_table=this_storm_object_table.iloc[these_latlng_rows],
            valid_time_unix_sec=valid_times_unix_sec[i],
            axes_object=this_axes_object,
            basemap_object=this_basemap_object,
            storm_colour=storm_colour,
            storm_opacity=storm_opacity,
            include_secondary_ids=include_secondary_ids,
            output_dir_name=output_dir_name,
            radar_matrix=this_radar_matrix,
            radar_field_name=radar_field_name,
            radar_latitudes_deg=these_radar_latitudes_deg,
            radar_longitudes_deg=these_radar_longitudes_deg)
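
The numpy.flipud call plus latitude reversal above keeps the radar grid and
its coordinate vector consistent: flipping the matrix rows is only valid if
the latitude array is reversed in step. In isolation:

import numpy

radar_matrix = numpy.array([[10., 20.],
                            [30., 40.]])
radar_latitudes_deg = numpy.array([35., 36.])  # Row 0 = southernmost row.

# Flip the rows so that row 0 becomes the northernmost row...
radar_matrix = numpy.flipud(radar_matrix)
# ... and reverse the latitudes so they still describe the rows.
radar_latitudes_deg = radar_latitudes_deg[::-1]

print(radar_latitudes_deg)  # [36. 35.]
print(radar_matrix)         # [[30. 40.], [10. 20.]]
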
def _find_tracking_gaps(first_spc_date_string, last_spc_date_string,
                        top_tracking_dir_name, tracking_scale_metres2,
                        source_name, min_time_diff_seconds):
    """Finds gaps (temporal discontinuities) between storm-tracking files.

    :param first_spc_date_string: See documentation at top of file.
    :param last_spc_date_string: Same.
    :param top_tracking_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param source_name: Same.
    :param min_time_diff_seconds: Same.
    """

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    tracking_file_names = []
    unix_times_sec = numpy.array([], dtype=int)
    num_spc_dates = len(spc_date_strings)

    for i in range(num_spc_dates):
        print('Finding tracking files for SPC date "{0:s}"...'.format(
            spc_date_strings[i]))

        these_file_names = tracking_io.find_files_one_spc_date(
            spc_date_string=spc_date_strings[i],
            source_name=source_name,
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=tracking_scale_metres2,
            raise_error_if_missing=False)[0]

        print(len(these_file_names))

        if not len(these_file_names):
            continue

        these_file_sizes_bytes = numpy.array(
            [os.path.getsize(f) for f in these_file_names], dtype=int)
        these_valid_indices = numpy.where(
            these_file_sizes_bytes > FILE_SIZE_WITHOUT_STORMS_BYTES)[0]
        these_file_names = [these_file_names[k] for k in these_valid_indices]

        these_unix_times_sec = numpy.array(
            [tracking_io.file_name_to_time(f) for f in these_file_names],
            dtype=int)

        these_sort_indices = numpy.argsort(these_unix_times_sec)
        these_unix_times_sec = these_unix_times_sec[these_sort_indices]
        these_file_names = [these_file_names[k] for k in these_sort_indices]

        tracking_file_names += these_file_names
        unix_times_sec = numpy.concatenate(
            (unix_times_sec, these_unix_times_sec))

    time_diffs_seconds = numpy.diff(unix_times_sec)
    time_gap_indices = numpy.where(
        time_diffs_seconds >= min_time_diff_seconds)[0]

    num_time_gaps = len(time_gap_indices)

    print((
        '\nThere are {0:d} time gaps (successive files >= {1:d} seconds apart),'
        ' listed below:\n').format(num_time_gaps, min_time_diff_seconds))

    for i in time_gap_indices:
        this_start_time_string = time_conversion.unix_sec_to_string(
            unix_times_sec[i], TIME_FORMAT)
        this_end_time_string = time_conversion.unix_sec_to_string(
            unix_times_sec[i + 1], TIME_FORMAT)

        print('Gap between {0:s} and {1:s} = {2:d} seconds'.format(
            this_start_time_string, this_end_time_string,
            time_diffs_seconds[i]))
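
Gap detection above boils down to one numpy.diff and one numpy.where: a gap
exists wherever consecutive (sorted) file times differ by at least the
threshold. For example:

import numpy

unix_times_sec = numpy.array([0, 300, 600, 3600, 3900], dtype=int)
min_time_diff_seconds = 600

time_diffs_seconds = numpy.diff(unix_times_sec)
time_gap_indices = numpy.where(
    time_diffs_seconds >= min_time_diff_seconds)[0]

for i in time_gap_indices:
    print('Gap between {0:d} and {1:d} = {2:d} seconds'.format(
        unix_times_sec[i], unix_times_sec[i + 1], time_diffs_seconds[i]))
# Gap between 600 and 3600 = 3000 seconds
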
Example #11
def _run(top_input_dir_name, tracking_scale_metres2, first_spc_date_string,
         last_spc_date_string, min_distances_metres, max_distances_metres,
         top_output_dir_name):
    """Creates one or more distance buffers around each storm object (polygon).

    This is effectively the main method.

    :param top_input_dir_name: See documentation at top of file.
    :param tracking_scale_metres2: Same.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param min_distances_metres: Same.
    :param max_distances_metres: Same.
    :param top_output_dir_name: Same.
    """

    min_distances_metres[min_distances_metres < 0] = numpy.nan

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    for this_spc_date_string in spc_date_strings:
        these_input_file_names = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_input_dir_name,
            tracking_scale_metres2=tracking_scale_metres2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

        if len(these_input_file_names) == 0:
            continue

        for this_input_file_name in these_input_file_names:
            print('Reading input tracks from: "{0:s}"...'.format(
                this_input_file_name))

            this_storm_object_table = tracking_io.read_file(
                this_input_file_name)

            this_storm_object_table = (tracking_utils.create_distance_buffers(
                storm_object_table=this_storm_object_table,
                min_distances_metres=min_distances_metres,
                max_distances_metres=max_distances_metres))

            this_output_file_name = tracking_io.find_file(
                top_tracking_dir_name=top_output_dir_name,
                tracking_scale_metres2=tracking_scale_metres2,
                source_name=tracking_utils.SEGMOTION_NAME,
                valid_time_unix_sec=tracking_io.file_name_to_time(
                    this_input_file_name),
                spc_date_string=this_spc_date_string,
                raise_error_if_missing=False)

            print('Writing input tracks + buffers to: "{0:s}"...\n'.format(
                this_output_file_name))

            tracking_io.write_file(storm_object_table=this_storm_object_table,
                                   pickle_file_name=this_output_file_name)

        print(SEPARATOR_STRING)
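
Note that the first line of the method converts negative minimum distances to
NaN, which presumably marks a buffer with no inner edge (one that also covers
the storm interior). A plausible geometric sketch of a single distance buffer
with shapely; make_distance_buffer is hypothetical and not the library's
create_distance_buffers:

import numpy
from shapely.geometry import Polygon


def make_distance_buffer(polygon_object, min_distance, max_distance):
    """Ring between min_distance and max_distance outside the polygon edge.

    A NaN min_distance is taken to mean no inner edge, i.e., the buffer also
    covers the polygon interior.
    """

    outer_polygon = polygon_object.buffer(max_distance)
    if numpy.isnan(min_distance):
        return outer_polygon

    return outer_polygon.difference(polygon_object.buffer(min_distance))


storm_polygon = Polygon([(0., 0.), (1., 0.), (1., 1.), (0., 1.)])
print(make_distance_buffer(storm_polygon, 0., 5.).area)
print(make_distance_buffer(storm_polygon, numpy.nan, 5.).area)
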
def _run(top_orig_tracking_dir_name, top_new_tracking_dir_name,
         first_spc_date_string, last_spc_date_string, output_file_name):
    """Plots storms that were removed by remove_storms_outside_conus.py.

    This is effectively the main method.

    :param top_orig_tracking_dir_name: See documentation at top of file.
    :param top_new_tracking_dir_name: Same.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param output_file_name: Same.
    """

    file_system_utils.mkdir_recursive_if_necessary(file_name=output_file_name)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    orig_tracking_file_names = []

    for d in spc_date_strings:
        orig_tracking_file_names += tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_orig_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=d,
            raise_error_if_missing=False)[0]

    valid_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in orig_tracking_file_names],
        dtype=int)

    new_tracking_file_names = [
        tracking_io.find_file(
            top_tracking_dir_name=top_new_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            valid_time_unix_sec=t,
            spc_date_string=time_conversion.time_to_spc_date_string(t),
            raise_error_if_missing=True) for t in valid_times_unix_sec
    ]

    orig_storm_object_table = tracking_io.read_many_files(
        orig_tracking_file_names)
    print(SEPARATOR_STRING)

    new_storm_object_table = tracking_io.read_many_files(
        new_tracking_file_names)
    print(SEPARATOR_STRING)

    orig_storm_id_strings = (
        orig_storm_object_table[tracking_utils.FULL_ID_COLUMN].values.tolist())
    orig_storm_times_unix_sec = (
        orig_storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)
    new_storm_id_strings = (
        new_storm_object_table[tracking_utils.FULL_ID_COLUMN].values.tolist())
    new_storm_times_unix_sec = (
        new_storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)

    num_orig_storm_objects = len(orig_storm_object_table.index)
    orig_kept_flags = numpy.full(num_orig_storm_objects, 0, dtype=bool)

    these_indices = tracking_utils.find_storm_objects(
        all_id_strings=orig_storm_id_strings,
        all_times_unix_sec=orig_storm_times_unix_sec,
        id_strings_to_keep=new_storm_id_strings,
        times_to_keep_unix_sec=new_storm_times_unix_sec,
        allow_missing=False)

    orig_kept_flags[these_indices] = True
    orig_removed_indices = numpy.where(numpy.invert(orig_kept_flags))[0]
    print('{0:d} of {1:d} storm objects were outside CONUS.'.format(
        len(orig_removed_indices), num_orig_storm_objects))

    removed_storm_object_table = orig_storm_object_table.iloc[
        orig_removed_indices]
    removed_latitudes_deg = removed_storm_object_table[
        tracking_utils.CENTROID_LATITUDE_COLUMN].values

    removed_longitudes_deg = removed_storm_object_table[
        tracking_utils.CENTROID_LONGITUDE_COLUMN].values

    figure_object, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=numpy.min(removed_latitudes_deg) - 1.,
            max_latitude_deg=numpy.max(removed_latitudes_deg) + 1.,
            min_longitude_deg=numpy.min(removed_longitudes_deg) - 1.,
            max_longitude_deg=numpy.max(removed_longitudes_deg) + 1.,
            resolution_string='i'))

    plotting_utils.plot_coastlines(basemap_object=basemap_object,
                                   axes_object=axes_object,
                                   line_colour=BORDER_COLOUR)
    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR)
    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR)
    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS)
    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS)

    conus_latitudes_deg, conus_longitudes_deg = (
        conus_boundary.read_from_netcdf())
    conus_latitudes_deg, conus_longitudes_deg = conus_boundary.erode_boundary(
        latitudes_deg=conus_latitudes_deg,
        longitudes_deg=conus_longitudes_deg,
        erosion_distance_metres=EROSION_DISTANCE_METRES)

    axes_object.plot(conus_longitudes_deg,
                     conus_latitudes_deg,
                     color=LINE_COLOUR,
                     linestyle='solid',
                     linewidth=LINE_WIDTH)
    axes_object.plot(removed_longitudes_deg,
                     removed_latitudes_deg,
                     linestyle='None',
                     marker=MARKER_TYPE,
                     markersize=MARKER_SIZE,
                     markeredgewidth=0,
                     markerfacecolor=MARKER_COLOUR,
                     markeredgecolor=MARKER_COLOUR)

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    figure_object.savefig(output_file_name,
                          dpi=FIGURE_RESOLUTION_DPI,
                          pad_inches=0,
                          bbox_inches='tight')
    pyplot.close(figure_object)
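
The kept/removed bookkeeping above is a standard boolean-mask pattern: start
with all-False flags, mark the matched indices True, then invert to recover
the removed indices. In isolation:

import numpy

num_storm_objects = 5
kept_indices = numpy.array([0, 2, 3], dtype=int)

kept_flags = numpy.full(num_storm_objects, False, dtype=bool)
kept_flags[kept_indices] = True

removed_indices = numpy.where(numpy.invert(kept_flags))[0]
print(removed_indices)  # [1 4]
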
def _find_tracking_files_one_example(top_tracking_dir_name,
                                     valid_time_unix_sec, target_name):
    """Finds tracking files needed to make plots for one example.

    :param top_tracking_dir_name: See documentation at top of file.
    :param valid_time_unix_sec: Valid time for example.
    :param target_name: Name of target variable.
    :return: tracking_file_names: 1-D list of paths to tracking files.
    :raises: ValueError: if no tracking files are found.
    """

    target_param_dict = target_val_utils.target_name_to_params(target_name)
    min_lead_time_seconds = target_param_dict[
        target_val_utils.MIN_LEAD_TIME_KEY]
    max_lead_time_seconds = target_param_dict[
        target_val_utils.MAX_LEAD_TIME_KEY]

    first_time_unix_sec = valid_time_unix_sec + min_lead_time_seconds
    last_time_unix_sec = valid_time_unix_sec + max_lead_time_seconds

    first_spc_date_string = time_conversion.time_to_spc_date_string(
        first_time_unix_sec - TIME_INTERVAL_SECONDS)
    last_spc_date_string = time_conversion.time_to_spc_date_string(
        last_time_unix_sec + TIME_INTERVAL_SECONDS)
    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    tracking_file_names = []

    for this_spc_date_string in spc_date_strings:
        these_file_names = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=(
                echo_top_tracking.DUMMY_TRACKING_SCALE_METRES2),
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

        tracking_file_names += these_file_names

    if len(tracking_file_names) == 0:
        error_string = (
            'Cannot find any tracking files for SPC dates "{0:s}" to "{1:s}".'
        ).format(first_spc_date_string, last_spc_date_string)

        raise ValueError(error_string)

    tracking_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in tracking_file_names],
        dtype=int)

    sort_indices = numpy.argsort(tracking_times_unix_sec)
    tracking_times_unix_sec = tracking_times_unix_sec[sort_indices]
    tracking_file_names = [tracking_file_names[k] for k in sort_indices]

    these_indices = numpy.where(
        tracking_times_unix_sec <= first_time_unix_sec)[0]

    if len(these_indices) == 0:
        first_index = 0
    else:
        first_index = these_indices[-1]

    these_indices = numpy.where(
        tracking_times_unix_sec >= last_time_unix_sec)[0]

    if len(these_indices) == 0:
        last_index = len(tracking_file_names) - 1
    else:
        last_index = these_indices[0]

    return tracking_file_names[first_index:(last_index + 1)]
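
The two numpy.where blocks at the end bracket the lead-time window: the last
file at or before the window start through the first file at or after the
window end, with fallbacks to the list edges when no file qualifies. The same
logic on toy data:

import numpy

tracking_times_unix_sec = numpy.array([0, 300, 600, 900], dtype=int)
first_time_unix_sec = 400
last_time_unix_sec = 700

these_indices = numpy.where(
    tracking_times_unix_sec <= first_time_unix_sec)[0]
first_index = 0 if len(these_indices) == 0 else these_indices[-1]

these_indices = numpy.where(
    tracking_times_unix_sec >= last_time_unix_sec)[0]
last_index = (
    len(tracking_times_unix_sec) - 1 if len(these_indices) == 0
    else these_indices[0]
)

print(first_index, last_index)  # 1 3 -> files at 300, 600, 900 are kept.
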
def _extract_storm_images(num_image_rows, num_image_columns, rotate_grids,
                          rotated_grid_spacing_metres, radar_field_names,
                          refl_heights_m_agl, spc_date_string,
                          tarred_myrorss_dir_name, untarred_myrorss_dir_name,
                          top_tracking_dir_name, elevation_dir_name,
                          tracking_scale_metres2, target_name,
                          top_target_dir_name, top_output_dir_name):
    """Extracts storm-centered img for each field/height pair and storm object.

    :param num_image_rows: See documentation at top of file.
    :param num_image_columns: Same.
    :param rotate_grids: Same.
    :param rotated_grid_spacing_metres: Same.
    :param radar_field_names: Same.
    :param refl_heights_m_agl: Same.
    :param spc_date_string: Same.
    :param tarred_myrorss_dir_name: Same.
    :param untarred_myrorss_dir_name: Same.
    :param top_tracking_dir_name: Same.
    :param elevation_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param target_name: Same.
    :param top_target_dir_name: Same.
    :param top_output_dir_name: Same.
    """

    if target_name in ['', 'None']:
        target_name = None

    if target_name is not None:
        target_param_dict = target_val_utils.target_name_to_params(target_name)

        target_file_name = target_val_utils.find_target_file(
            top_directory_name=top_target_dir_name,
            event_type_string=target_param_dict[
                target_val_utils.EVENT_TYPE_KEY],
            spc_date_string=spc_date_string)

        print('Reading data from: "{0:s}"...'.format(target_file_name))
        target_dict = target_val_utils.read_target_values(
            netcdf_file_name=target_file_name, target_names=[target_name])
        print('\n')

    refl_heights_m_asl = radar_utils.get_valid_heights(
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_utils.REFL_NAME)

    # Untar files with azimuthal shear.
    az_shear_field_names = list(
        set(radar_field_names) & set(ALL_AZ_SHEAR_FIELD_NAMES))

    if len(az_shear_field_names):
        az_shear_tar_file_name = (
            '{0:s}/{1:s}/azimuthal_shear_only/{2:s}.tar').format(
                tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string)

        myrorss_io.unzip_1day_tar_file(
            tar_file_name=az_shear_tar_file_name,
            field_names=az_shear_field_names,
            spc_date_string=spc_date_string,
            top_target_directory_name=untarred_myrorss_dir_name)
        print(SEPARATOR_STRING)

    # Untar files with other radar fields.
    non_shear_field_names = list(
        set(radar_field_names) - set(ALL_AZ_SHEAR_FIELD_NAMES))

    if len(non_shear_field_names):
        non_shear_tar_file_name = '{0:s}/{1:s}/{2:s}.tar'.format(
            tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string)

        myrorss_io.unzip_1day_tar_file(
            tar_file_name=non_shear_tar_file_name,
            field_names=non_shear_field_names,
            spc_date_string=spc_date_string,
            top_target_directory_name=untarred_myrorss_dir_name,
            refl_heights_m_asl=refl_heights_m_asl)
        print(SEPARATOR_STRING)

    # Read storm tracks for the given SPC date.
    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    storm_object_table = tracking_io.read_many_files(tracking_file_names)[
        storm_images.STORM_COLUMNS_NEEDED]
    print(SEPARATOR_STRING)

    if target_name is not None:
        print(('Removing storm objects without target values (variable = '
               '"{0:s}")...').format(target_name))

        these_indices = tracking_utils.find_storm_objects(
            all_id_strings=storm_object_table[
                tracking_utils.FULL_ID_COLUMN].values.tolist(),
            all_times_unix_sec=storm_object_table[
                tracking_utils.VALID_TIME_COLUMN].values.astype(int),
            id_strings_to_keep=target_dict[target_val_utils.FULL_IDS_KEY],
            times_to_keep_unix_sec=target_dict[
                target_val_utils.VALID_TIMES_KEY],
            allow_missing=False)

        num_storm_objects_orig = len(storm_object_table.index)
        storm_object_table = storm_object_table.iloc[these_indices]
        num_storm_objects = len(storm_object_table.index)

        print('Removed {0:d} of {1:d} storm objects!\n'.format(
            num_storm_objects_orig - num_storm_objects,
            num_storm_objects_orig))

    # Extract storm-centered radar images.
    storm_images.extract_storm_images_myrorss_or_mrms(
        storm_object_table=storm_object_table,
        radar_source=radar_utils.MYRORSS_SOURCE_ID,
        top_radar_dir_name=untarred_myrorss_dir_name,
        top_output_dir_name=top_output_dir_name,
        elevation_dir_name=elevation_dir_name,
        num_storm_image_rows=num_image_rows,
        num_storm_image_columns=num_image_columns,
        rotate_grids=rotate_grids,
        rotated_grid_spacing_metres=rotated_grid_spacing_metres,
        radar_field_names=radar_field_names,
        reflectivity_heights_m_agl=refl_heights_m_agl)
    print(SEPARATOR_STRING)

    # Remove untarred MYRORSS files.
    myrorss_io.remove_unzipped_data_1day(
        spc_date_string=spc_date_string,
        top_directory_name=untarred_myrorss_dir_name,
        field_names=radar_field_names,
        refl_heights_m_asl=refl_heights_m_asl)
    print(SEPARATOR_STRING)
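
Partitioning the requested fields into azimuthal-shear and non-shear groups
before untarring, as done above, is plain set algebra, since the two tar
archives hold disjoint fields. Field names below are illustrative only:

ALL_AZ_SHEAR_FIELD_NAMES = ['low_level_az_shear', 'mid_level_az_shear']
radar_field_names = ['reflectivity', 'low_level_az_shear']

az_shear_field_names = list(
    set(radar_field_names) & set(ALL_AZ_SHEAR_FIELD_NAMES))
non_shear_field_names = list(
    set(radar_field_names) - set(ALL_AZ_SHEAR_FIELD_NAMES))

print(az_shear_field_names)   # ['low_level_az_shear']
print(non_shear_field_names)  # ['reflectivity']
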
Example #15
def _extract_storm_images(
        num_image_rows, num_image_columns, rotate_grids,
        rotated_grid_spacing_metres, radar_field_names, refl_heights_m_agl,
        spc_date_string, first_time_string, last_time_string,
        tarred_myrorss_dir_name, untarred_myrorss_dir_name,
        top_tracking_dir_name, elevation_dir_name, tracking_scale_metres2,
        target_name, top_target_dir_name, top_output_dir_name):
    """Extracts storm-centered img for each field/height pair and storm object.

    :param num_image_rows: See documentation at top of file.
    :param num_image_columns: Same.
    :param rotate_grids: Same.
    :param rotated_grid_spacing_metres: Same.
    :param radar_field_names: Same.
    :param refl_heights_m_agl: Same.
    :param spc_date_string: Same.
    :param first_time_string: Same.
    :param last_time_string: Same.
    :param tarred_myrorss_dir_name: Same.
    :param untarred_myrorss_dir_name: Same.
    :param top_tracking_dir_name: Same.
    :param elevation_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param target_name: Same.
    :param top_target_dir_name: Same.
    :param top_output_dir_name: Same.
    :raises: ValueError: if `first_time_string` and `last_time_string` have
        different SPC dates.
    """

    if elevation_dir_name in ['', 'None']:
        elevation_dir_name = None

    if elevation_dir_name is None:
        host_name = socket.gethostname()

        if 'casper' in host_name:
            elevation_dir_name = '/glade/work/ryanlage/elevation'
        else:
            elevation_dir_name = '/condo/swatwork/ralager/elevation'

    if spc_date_string in ['', 'None']:
        first_time_unix_sec = time_conversion.string_to_unix_sec(
            first_time_string, TIME_FORMAT)
        last_time_unix_sec = time_conversion.string_to_unix_sec(
            last_time_string, TIME_FORMAT)

        first_spc_date_string = time_conversion.time_to_spc_date_string(
            first_time_unix_sec)
        last_spc_date_string = time_conversion.time_to_spc_date_string(
            last_time_unix_sec)

        if first_spc_date_string != last_spc_date_string:
            error_string = (
                'First ({0:s}) and last ({1:s}) times have different SPC dates.'
                '  This script can handle only one SPC date.'
            ).format(first_time_string, last_time_string)

            raise ValueError(error_string)

        spc_date_string = first_spc_date_string
    else:
        first_time_unix_sec = 0
        last_time_unix_sec = int(1e12)

    if tarred_myrorss_dir_name in ['', 'None']:
        tarred_myrorss_dir_name = None
    if target_name in ['', 'None']:
        target_name = None

    if target_name is not None:
        target_param_dict = target_val_utils.target_name_to_params(target_name)

        target_file_name = target_val_utils.find_target_file(
            top_directory_name=top_target_dir_name,
            event_type_string=target_param_dict[
                target_val_utils.EVENT_TYPE_KEY],
            spc_date_string=spc_date_string)

        print('Reading data from: "{0:s}"...'.format(target_file_name))
        target_dict = target_val_utils.read_target_values(
            netcdf_file_name=target_file_name, target_names=[target_name]
        )
        print('\n')

    refl_heights_m_asl = radar_utils.get_valid_heights(
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_utils.REFL_NAME)

    # Untar files.
    if tarred_myrorss_dir_name is not None:
        az_shear_field_names = list(
            set(radar_field_names) & set(ALL_AZ_SHEAR_FIELD_NAMES)
        )

        if len(az_shear_field_names) > 0:
            az_shear_tar_file_name = (
                '{0:s}/{1:s}/azimuthal_shear_only/{2:s}.tar'
            ).format(
                tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string
            )

            myrorss_io.unzip_1day_tar_file(
                tar_file_name=az_shear_tar_file_name,
                field_names=az_shear_field_names,
                spc_date_string=spc_date_string,
                top_target_directory_name=untarred_myrorss_dir_name)
            print(SEPARATOR_STRING)

        non_shear_field_names = list(
            set(radar_field_names) - set(ALL_AZ_SHEAR_FIELD_NAMES)
        )

        if len(non_shear_field_names) > 0:
            non_shear_tar_file_name = '{0:s}/{1:s}/{2:s}.tar'.format(
                tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string
            )

            myrorss_io.unzip_1day_tar_file(
                tar_file_name=non_shear_tar_file_name,
                field_names=non_shear_field_names,
                spc_date_string=spc_date_string,
                top_target_directory_name=untarred_myrorss_dir_name,
                refl_heights_m_asl=refl_heights_m_asl)
            print(SEPARATOR_STRING)

    # Read storm tracks for the given SPC date.
    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2
    )[0]

    file_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in tracking_file_names],
        dtype=int
    )

    good_indices = numpy.where(numpy.logical_and(
        file_times_unix_sec >= first_time_unix_sec,
        file_times_unix_sec <= last_time_unix_sec
    ))[0]

    tracking_file_names = [tracking_file_names[k] for k in good_indices]

    storm_object_table = tracking_io.read_many_files(
        tracking_file_names
    )[storm_images.STORM_COLUMNS_NEEDED]
    print(SEPARATOR_STRING)

    if target_name is not None:
        print((
            'Removing storm objects without target values (variable = '
            '"{0:s}")...'
        ).format(target_name))

        these_indices = tracking_utils.find_storm_objects(
            all_id_strings=storm_object_table[
                tracking_utils.FULL_ID_COLUMN].values.tolist(),
            all_times_unix_sec=storm_object_table[
                tracking_utils.VALID_TIME_COLUMN].values.astype(int),
            id_strings_to_keep=target_dict[target_val_utils.FULL_IDS_KEY],
            times_to_keep_unix_sec=target_dict[
                target_val_utils.VALID_TIMES_KEY],
            allow_missing=False)

        num_storm_objects_orig = len(storm_object_table.index)
        storm_object_table = storm_object_table.iloc[these_indices]
        num_storm_objects = len(storm_object_table.index)

        print('Removed {0:d} of {1:d} storm objects!\n'.format(
            num_storm_objects_orig - num_storm_objects, num_storm_objects_orig
        ))

    # Extract storm-centered radar images.
    storm_images.extract_storm_images_myrorss_or_mrms(
        storm_object_table=storm_object_table,
        radar_source=radar_utils.MYRORSS_SOURCE_ID,
        top_radar_dir_name=untarred_myrorss_dir_name,
        top_output_dir_name=top_output_dir_name,
        elevation_dir_name=elevation_dir_name,
        num_storm_image_rows=num_image_rows,
        num_storm_image_columns=num_image_columns, rotate_grids=rotate_grids,
        rotated_grid_spacing_metres=rotated_grid_spacing_metres,
        radar_field_names=radar_field_names,
        reflectivity_heights_m_agl=refl_heights_m_agl)
    print(SEPARATOR_STRING)

    # Remove untarred MYRORSS files.
    if tarred_myrorss_dir_name is not None:
        myrorss_io.remove_unzipped_data_1day(
            spc_date_string=spc_date_string,
            top_directory_name=untarred_myrorss_dir_name,
            field_names=radar_field_names,
            refl_heights_m_asl=refl_heights_m_asl)
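
# The pattern above -- converting tracking-file names to valid times, then
# keeping only files inside [first_time_unix_sec, last_time_unix_sec] --
# recurs in several of these examples.  A minimal stand-alone sketch of the
# same filter, using plain numpy so that it runs without GewitterGefahr (the
# function name is illustrative, not part of the library):

import numpy


def _filter_file_times(file_times_unix_sec, first_time_unix_sec,
                       last_time_unix_sec):
    """Returns indices of file times inside the window (inclusive)."""

    file_times_unix_sec = numpy.asarray(file_times_unix_sec, dtype=int)

    return numpy.where(numpy.logical_and(
        file_times_unix_sec >= first_time_unix_sec,
        file_times_unix_sec <= last_time_unix_sec
    ))[0]


# Keeps only the middle file (valid 1430 UTC 1 Jan 2018; window is 1200-1800).
good_indices = _filter_file_times(
    [1514806200, 1514817000, 1514851200], 1514808000, 1514829600)
print(good_indices)  # [1]
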
def _run(myrorss_tracking_dir_name, gridrad_tracking_dir_name,
         max_distance_metres, source_dataset_name, first_spc_date_string,
         last_spc_date_string, output_dir_name):
    """Matches storm objects between MYRORSS and GridRad datasets.

    This is effectively the main method.

    :param myrorss_tracking_dir_name: See documentation at top of file.
    :param gridrad_tracking_dir_name: Same.
    :param max_distance_metres: Same.
    :param source_dataset_name: Same.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param output_dir_name: Same.
    :raises: ValueError: if `source_dataset_name not in VALID_DATASET_NAMES`.
    """

    if source_dataset_name not in VALID_DATASET_NAMES:
        error_string = (
            '\n{0:s}\nValid datasets (listed above) do not include "{1:s}".'
        ).format(str(VALID_DATASET_NAMES), source_dataset_name)

        raise ValueError(error_string)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    if source_dataset_name == radar_utils.MYRORSS_SOURCE_ID:
        source_tracking_dir_name = myrorss_tracking_dir_name
        target_tracking_dir_name = gridrad_tracking_dir_name
        target_dataset_name = radar_utils.GRIDRAD_SOURCE_ID
    else:
        source_tracking_dir_name = gridrad_tracking_dir_name
        target_tracking_dir_name = myrorss_tracking_dir_name
        target_dataset_name = radar_utils.MYRORSS_SOURCE_ID

    source_tracking_file_names = []
    target_tracking_file_names = []

    for this_spc_date_string in spc_date_strings:
        source_tracking_file_names += tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=source_tracking_dir_name,
            tracking_scale_metres2=TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=True)[0]

        target_tracking_file_names += tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=target_tracking_dir_name,
            tracking_scale_metres2=TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=True)[0]

    source_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in source_tracking_file_names],
        dtype=int)

    target_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in target_tracking_file_names],
        dtype=int)

    source_to_target_indices = _match_all_times(
        source_times_unix_sec=source_times_unix_sec,
        target_times_unix_sec=target_times_unix_sec,
        max_diff_seconds=MAX_TIME_DIFF_SECONDS)
    print(SEPARATOR_STRING)

    del target_times_unix_sec
    target_tracking_file_names = [
        target_tracking_file_names[k] for k in source_to_target_indices
    ]

    num_source_times = len(source_times_unix_sec)

    for i in range(num_source_times):
        print('Reading data from: "{0:s}"...'.format(
            source_tracking_file_names[i]))
        this_source_object_table = tracking_io.read_file(
            source_tracking_file_names[i])

        print('Reading data from: "{0:s}"...'.format(
            target_tracking_file_names[i]))
        this_target_object_table = tracking_io.read_file(
            target_tracking_file_names[i])

        this_source_to_target_dict = _match_locations_one_time(
            source_object_table=this_source_object_table,
            target_object_table=this_target_object_table,
            max_distance_metres=max_distance_metres)

        this_match_file_name = tracking_io.find_match_file(
            top_directory_name=output_dir_name,
            valid_time_unix_sec=source_times_unix_sec[i],
            raise_error_if_missing=False)

        print('Writing results to: "{0:s}"...\n'.format(this_match_file_name))
        tracking_io.write_matches(
            pickle_file_name=this_match_file_name,
            source_to_target_dict=this_source_to_target_dict,
            max_time_diff_seconds=MAX_TIME_DIFF_SECONDS,
            max_distance_metres=max_distance_metres,
            source_dataset_name=source_dataset_name,
            source_tracking_dir_name=source_tracking_dir_name,
            target_dataset_name=target_dataset_name,
            target_tracking_dir_name=target_tracking_dir_name)
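
# `_match_all_times` is called above but defined elsewhere in the script.  A
# plausible minimal sketch, assuming that it returns, for each source time,
# the index of the nearest target time and fails when any difference exceeds
# `max_diff_seconds`:

import numpy


def _match_all_times(source_times_unix_sec, target_times_unix_sec,
                     max_diff_seconds):
    """For each source time, finds index of nearest target time.

    :raises: ValueError: if any source time has no target time within
        `max_diff_seconds`.
    """

    source_times_unix_sec = numpy.asarray(source_times_unix_sec, dtype=int)
    target_times_unix_sec = numpy.asarray(target_times_unix_sec, dtype=int)

    # Matrix of absolute time differences (num source times x num target
    # times).
    diff_matrix_seconds = numpy.absolute(
        source_times_unix_sec[:, None] - target_times_unix_sec[None, :])

    nearest_indices = numpy.argmin(diff_matrix_seconds, axis=1)
    nearest_diffs_seconds = diff_matrix_seconds[
        numpy.arange(len(source_times_unix_sec)), nearest_indices]

    if numpy.any(nearest_diffs_seconds > max_diff_seconds):
        raise ValueError(
            'Some source times have no target time within {0:d} seconds.'
            .format(max_diff_seconds))

    return nearest_indices
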
Example #17
def _run(storm_metafile_name, top_tracking_dir_name, lead_time_seconds,
         output_file_name):
    """Plots spatial distribution of examples (storm objects) in file.

    This is effectively the main method.

    :param storm_metafile_name: See documentation at top of file.
    :param top_tracking_dir_name: Same.
    :param lead_time_seconds: Same.
    :param output_file_name: Same.
    """

    file_system_utils.mkdir_recursive_if_necessary(file_name=output_file_name)

    # Read storm metadata.
    print(
        'Reading storm metadata from: "{0:s}"...'.format(storm_metafile_name))
    orig_full_id_strings, orig_times_unix_sec = (
        tracking_io.read_ids_and_times(storm_metafile_name))
    orig_primary_id_strings = temporal_tracking.full_to_partial_ids(
        orig_full_id_strings)[0]

    # Find relevant tracking files.
    spc_date_strings = [
        time_conversion.time_to_spc_date_string(t) for t in orig_times_unix_sec
    ]
    spc_date_strings += [
        time_conversion.time_to_spc_date_string(t + lead_time_seconds)
        for t in orig_times_unix_sec
    ]
    spc_date_strings = list(set(spc_date_strings))

    tracking_file_names = []

    for this_spc_date_string in spc_date_strings:
        tracking_file_names += tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

    file_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in tracking_file_names],
        dtype=int)

    num_orig_storm_objects = len(orig_full_id_strings)
    num_files = len(file_times_unix_sec)
    keep_file_flags = numpy.full(num_files, False, dtype=bool)

    for i in range(num_orig_storm_objects):
        these_flags = numpy.logical_and(
            file_times_unix_sec >= orig_times_unix_sec[i],
            file_times_unix_sec <= orig_times_unix_sec[i] + lead_time_seconds)
        keep_file_flags = numpy.logical_or(keep_file_flags, these_flags)

    del file_times_unix_sec
    keep_file_indices = numpy.where(keep_file_flags)[0]
    tracking_file_names = [tracking_file_names[k] for k in keep_file_indices]

    # Read relevant tracking files.
    num_files = len(tracking_file_names)
    storm_object_tables = [None] * num_files
    print(SEPARATOR_STRING)

    for i in range(num_files):
        print('Reading data from: "{0:s}"...'.format(tracking_file_names[i]))
        this_table = tracking_io.read_file(tracking_file_names[i])

        storm_object_tables[i] = this_table.loc[this_table[
            tracking_utils.PRIMARY_ID_COLUMN].isin(
                numpy.array(orig_primary_id_strings))]

        if i == 0:
            continue

        # Align columns with the first table, so that pandas.concat below
        # stacks like columns even when files store them in different orders.
        storm_object_tables[i] = storm_object_tables[i].align(
            storm_object_tables[0], axis=1)[0]

    storm_object_table = pandas.concat(storm_object_tables,
                                       axis=0,
                                       ignore_index=True)
    print(SEPARATOR_STRING)

    # Find relevant storm objects.
    orig_object_rows = tracking_utils.find_storm_objects(
        all_id_strings=storm_object_table[
            tracking_utils.FULL_ID_COLUMN].values.tolist(),
        all_times_unix_sec=storm_object_table[
            tracking_utils.VALID_TIME_COLUMN].values,
        id_strings_to_keep=orig_full_id_strings,
        times_to_keep_unix_sec=orig_times_unix_sec)

    good_object_rows = numpy.array([], dtype=int)

    for i in range(num_orig_storm_objects):
        # Keep only non-merging successors: allow at most one secondary-ID
        # change caused by a split, but none caused by a merger.

        first_rows = temporal_tracking.find_successors(
            storm_object_table=storm_object_table,
            target_row=orig_object_rows[i],
            num_seconds_forward=lead_time_seconds,
            max_num_sec_id_changes=1,
            change_type_string=temporal_tracking.SPLIT_STRING,
            return_all_on_path=True)

        second_rows = temporal_tracking.find_successors(
            storm_object_table=storm_object_table,
            target_row=orig_object_rows[i],
            num_seconds_forward=lead_time_seconds,
            max_num_sec_id_changes=0,
            change_type_string=temporal_tracking.MERGER_STRING,
            return_all_on_path=True)

        first_rows = first_rows.tolist()
        second_rows = second_rows.tolist()
        these_rows = set(first_rows) & set(second_rows)
        these_rows = numpy.array(list(these_rows), dtype=int)

        good_object_rows = numpy.concatenate((good_object_rows, these_rows))

    good_object_rows = numpy.unique(good_object_rows)
    storm_object_table = storm_object_table.iloc[good_object_rows]

    times_of_day_sec = numpy.mod(
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values,
        NUM_SECONDS_IN_DAY)
    storm_object_table = storm_object_table.assign(
        **{tracking_utils.VALID_TIME_COLUMN: times_of_day_sec})

    min_plot_latitude_deg = -LATLNG_BUFFER_DEG + numpy.min(
        storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values)
    max_plot_latitude_deg = LATLNG_BUFFER_DEG + numpy.max(
        storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values)
    min_plot_longitude_deg = -LATLNG_BUFFER_DEG + numpy.min(
        storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values)
    max_plot_longitude_deg = LATLNG_BUFFER_DEG + numpy.max(
        storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values)

    _, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg,
            resolution_string='i'))

    plotting_utils.plot_coastlines(basemap_object=basemap_object,
                                   axes_object=axes_object,
                                   line_colour=BORDER_COLOUR,
                                   line_width=BORDER_WIDTH * 2)
    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR,
                                  line_width=BORDER_WIDTH)
    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR,
                                             line_width=BORDER_WIDTH)
    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS,
                                  line_width=BORDER_WIDTH)
    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS,
                                  line_width=BORDER_WIDTH)

    # colour_bar_object = storm_plotting.plot_storm_tracks(
    #     storm_object_table=storm_object_table, axes_object=axes_object,
    #     basemap_object=basemap_object, colour_map_object=COLOUR_MAP_OBJECT,
    #     colour_min_unix_sec=0, colour_max_unix_sec=NUM_SECONDS_IN_DAY - 1,
    #     line_width=TRACK_LINE_WIDTH,
    #     start_marker_type=None, end_marker_type=None
    # )

    colour_bar_object = storm_plotting.plot_storm_centroids(
        storm_object_table=storm_object_table,
        axes_object=axes_object,
        basemap_object=basemap_object,
        colour_map_object=COLOUR_MAP_OBJECT,
        colour_min_unix_sec=0,
        colour_max_unix_sec=NUM_SECONDS_IN_DAY - 1)

    tick_times_unix_sec = numpy.linspace(0,
                                         NUM_SECONDS_IN_DAY,
                                         num=NUM_HOURS_IN_DAY + 1,
                                         dtype=int)
    tick_times_unix_sec = tick_times_unix_sec[:-1]
    tick_times_unix_sec = tick_times_unix_sec[::2]

    tick_time_strings = [
        time_conversion.unix_sec_to_string(t, COLOUR_BAR_TIME_FORMAT)
        for t in tick_times_unix_sec
    ]

    colour_bar_object.set_ticks(tick_times_unix_sec)
    colour_bar_object.set_ticklabels(tick_time_strings)

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    pyplot.savefig(output_file_name,
                   dpi=FIGURE_RESOLUTION_DPI,
                   pad_inches=0,
                   bbox_inches='tight')
    pyplot.close()
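
# The colour mapping above works because valid times are first wrapped to
# seconds-of-day with numpy.mod.  A self-contained illustration of the
# wraparound and of the two-hourly tick labels, using the standard library in
# place of the script's time_conversion helpers and assuming that
# COLOUR_BAR_TIME_FORMAT is '%H%M':

import time

import numpy

NUM_SECONDS_IN_DAY = 86400
NUM_HOURS_IN_DAY = 24

# Two times exactly one day apart wrap to the same second of day (54000 =
# 1500 UTC).
times_unix_sec = numpy.array([1588345200, 1588431600], dtype=int)
print(numpy.mod(times_unix_sec, NUM_SECONDS_IN_DAY))  # [54000 54000]

# One tick every two hours: 0000, 0200, ..., 2200 UTC.
tick_times_sec = numpy.linspace(
    0, NUM_SECONDS_IN_DAY, num=NUM_HOURS_IN_DAY + 1, dtype=int)[:-1][::2]
tick_time_strings = [
    time.strftime('%H%M', time.gmtime(t)) for t in tick_times_sec
]
print(tick_time_strings)  # ['0000', '0200', ..., '2200']
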
def _run(input_warning_file_name, top_tracking_dir_name, spc_date_string,
         max_distance_metres, min_lifetime_fraction, output_warning_file_name):
    """Links each NWS tornado warning to nearest storm.

    This is effectively the main method.

    :param input_warning_file_name: See documentation at top of file.
    :param top_tracking_dir_name: Same.
    :param spc_date_string: Same.
    :param max_distance_metres: Same.
    :param min_lifetime_fraction: Same.
    :param output_warning_file_name: Same.
    """

    error_checking.assert_is_greater(max_distance_metres, 0.)
    error_checking.assert_is_greater(min_lifetime_fraction, 0.)
    error_checking.assert_is_leq(min_lifetime_fraction, 1.)

    print('Reading warnings from: "{0:s}"...'.format(input_warning_file_name))
    with open(input_warning_file_name, 'rb') as this_file_handle:
        warning_table = pickle.load(this_file_handle)

    date_start_time_unix_sec = (
        time_conversion.get_start_of_spc_date(spc_date_string))
    date_end_time_unix_sec = (
        time_conversion.get_end_of_spc_date(spc_date_string))
    warning_table = warning_table.loc[
        (warning_table[WARNING_START_TIME_KEY] >= date_start_time_unix_sec)
        & (warning_table[WARNING_START_TIME_KEY] <= date_end_time_unix_sec)]
    num_warnings = len(warning_table.index)

    print('Number of warnings beginning on SPC date "{0:s}" = {1:d}'.format(
        spc_date_string, num_warnings))

    warning_polygon_objects_xy = [None] * num_warnings
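    # Selecting the same column twice and calling .values.tolist() is a pandas
    # trick for seeding a column whose entries will hold Python lists (here,
    # one list of linked secondary IDs per warning); the placeholder values
    # are overwritten in the loop below.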
    nested_array = warning_table[[
        WARNING_START_TIME_KEY, WARNING_START_TIME_KEY
    ]].values.tolist()

    warning_table = warning_table.assign(
        **{
            WARNING_XY_POLYGON_KEY: warning_polygon_objects_xy,
            LINKED_SECONDARY_IDS_KEY: nested_array
        })

    for k in range(num_warnings):
        warning_table[LINKED_SECONDARY_IDS_KEY].values[k] = []

        this_object_latlng = warning_table[
            WARNING_LATLNG_POLYGON_KEY].values[k]

        warning_table[WARNING_XY_POLYGON_KEY].values[k], _ = (
            polygons.project_latlng_to_xy(
                polygon_object_latlng=this_object_latlng,
                projection_object=PROJECTION_OBJECT))

    tracking_file_names = []

    for i in [-1, 0, 1]:
        this_spc_date_string = time_conversion.time_to_spc_date_string(
            date_start_time_unix_sec + i * NUM_SECONDS_PER_DAY)

        tracking_file_names += tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

    if len(tracking_file_names) == 0:
        _write_linked_warnings(warning_table=warning_table,
                               output_file_name=output_warning_file_name)

        return

    print(SEPARATOR_STRING)
    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    if len(storm_object_table.index) == 0:
        _write_linked_warnings(warning_table=warning_table,
                               output_file_name=output_warning_file_name)

        return

    storm_object_table = linkage._project_storms_latlng_to_xy(
        storm_object_table=storm_object_table,
        projection_object=PROJECTION_OBJECT)

    for k in range(num_warnings):
        this_start_time_string = time_conversion.unix_sec_to_string(
            warning_table[WARNING_START_TIME_KEY].values[k],
            LOG_MESSAGE_TIME_FORMAT)

        this_end_time_string = time_conversion.unix_sec_to_string(
            warning_table[WARNING_END_TIME_KEY].values[k],
            LOG_MESSAGE_TIME_FORMAT)

        print('Attempting to link warning from {0:s} to {1:s}...'.format(
            this_start_time_string, this_end_time_string))

        warning_table[LINKED_SECONDARY_IDS_KEY].values[k] = _link_one_warning(
            warning_table=warning_table.iloc[[k]],
            storm_object_table=copy.deepcopy(storm_object_table),
            max_distance_metres=max_distance_metres,
            min_lifetime_fraction=min_lifetime_fraction)

        print('\n')

    _write_linked_warnings(warning_table=warning_table,
                           output_file_name=output_warning_file_name)
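
# `_write_linked_warnings` is called above but defined elsewhere in the
# script.  A hypothetical minimal sketch, assuming that it just pickles the
# warning table, creating the output directory if necessary:

import os
import pickle


def _write_linked_warnings(warning_table, output_file_name):
    """Writes warning table (with linked secondary storm IDs) to Pickle file.
    """

    directory_name = os.path.dirname(output_file_name)
    if directory_name:
        os.makedirs(directory_name, exist_ok=True)

    print('Writing linked warnings to: "{0:s}"...'.format(output_file_name))

    with open(output_file_name, 'wb') as file_handle:
        pickle.dump(warning_table, file_handle)
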
def _interp_soundings(spc_date_string, lead_times_seconds,
                      lag_time_for_convective_contamination_sec,
                      top_ruc_directory_name, top_rap_directory_name,
                      top_tracking_dir_name, tracking_scale_metres2,
                      top_output_dir_name):
    """Interpolates NWP sounding to each storm object at each lead time.

    :param spc_date_string: See documentation at top of file.
    :param lead_times_seconds: Same.
    :param lag_time_for_convective_contamination_sec: Same.
    :param top_ruc_directory_name: Same.
    :param top_rap_directory_name: Same.
    :param top_tracking_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param top_output_dir_name: Same.
    :raises: ValueError: if model-initialization times needed are on opposite
        sides of 0000 UTC 1 May 2012 (the cutoff between RUC and RAP models).
    """

    lead_times_seconds = numpy.array(lead_times_seconds, dtype=int)

    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    first_storm_time_unix_sec = numpy.min(
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)
    last_storm_time_unix_sec = numpy.max(
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)

    first_init_time_unix_sec = number_rounding.floor_to_nearest(
        (first_storm_time_unix_sec + numpy.min(lead_times_seconds) -
         lag_time_for_convective_contamination_sec), HOURS_TO_SECONDS)

    last_init_time_unix_sec = number_rounding.floor_to_nearest(
        (last_storm_time_unix_sec + numpy.max(lead_times_seconds) -
         lag_time_for_convective_contamination_sec), HOURS_TO_SECONDS)

    extreme_init_times_unix_sec = numpy.array(
        [first_init_time_unix_sec, last_init_time_unix_sec], dtype=int)

    if numpy.all(extreme_init_times_unix_sec < FIRST_RAP_TIME_UNIX_SEC):
        top_grib_directory_name = top_ruc_directory_name
        model_name = nwp_model_utils.RUC_MODEL_NAME

    elif numpy.all(extreme_init_times_unix_sec >= FIRST_RAP_TIME_UNIX_SEC):
        top_grib_directory_name = top_rap_directory_name
        model_name = nwp_model_utils.RAP_MODEL_NAME

    else:
        first_storm_time_string = time_conversion.unix_sec_to_string(
            first_storm_time_unix_sec, STORM_TIME_FORMAT)
        last_storm_time_string = time_conversion.unix_sec_to_string(
            last_storm_time_unix_sec, STORM_TIME_FORMAT)
        first_init_time_string = time_conversion.unix_sec_to_string(
            first_init_time_unix_sec, MODEL_INIT_TIME_FORMAT)
        last_init_time_string = time_conversion.unix_sec_to_string(
            last_init_time_unix_sec, MODEL_INIT_TIME_FORMAT)

        error_string = (
            'First and last storm times are {0:s} and {1:s}.  Thus, first and '
            'last model-initialization times needed are {2:s} and {3:s}, which '
            'are on opposite sides of {4:s} (the cutoff between RUC and RAP '
            'models).  The code is not generalized enough to interp data from '
            'two different models.  Sorry, eh?').format(
                first_storm_time_string, last_storm_time_string,
                first_init_time_string, last_init_time_string,
                FIRST_RAP_TIME_STRING)

        raise ValueError(error_string)

    sounding_dict_by_lead_time = soundings.interp_soundings_to_storm_objects(
        storm_object_table=storm_object_table,
        top_grib_directory_name=top_grib_directory_name,
        model_name=model_name,
        use_all_grids=True,
        height_levels_m_agl=soundings.DEFAULT_HEIGHT_LEVELS_M_AGL,
        lead_times_seconds=lead_times_seconds,
        lag_time_for_convective_contamination_sec=
        lag_time_for_convective_contamination_sec,
        wgrib_exe_name=WGRIB_EXE_NAME,
        wgrib2_exe_name=WGRIB2_EXE_NAME,
        raise_error_if_missing=False)

    print(SEPARATOR_STRING)
    num_lead_times = len(lead_times_seconds)

    for k in range(num_lead_times):
        this_sounding_file_name = soundings.find_sounding_file(
            top_directory_name=top_output_dir_name,
            spc_date_string=spc_date_string,
            lead_time_seconds=lead_times_seconds[k],
            lag_time_for_convective_contamination_sec=
            lag_time_for_convective_contamination_sec,
            raise_error_if_missing=False)

        print(
            'Writing soundings to: "{0:s}"...'.format(this_sounding_file_name))

        soundings.write_soundings(
            netcdf_file_name=this_sounding_file_name,
            sounding_dict_height_coords=sounding_dict_by_lead_time[k],
            lead_time_seconds=lead_times_seconds[k],
            lag_time_for_convective_contamination_sec=
            lag_time_for_convective_contamination_sec)
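
# `number_rounding.floor_to_nearest`, used above to round candidate
# model-init times down to the previous hour, has a one-line numpy
# equivalent, assuming a positive rounding base:

import numpy

HOURS_TO_SECONDS = 3600


def floor_to_nearest(input_value, rounding_base):
    """Rounds scalar or numpy array down to nearest multiple of base."""

    return rounding_base * numpy.floor(
        numpy.asarray(input_value) / rounding_base)


print(floor_to_nearest(5000., HOURS_TO_SECONDS))  # 3600.0
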
Example #20
def _find_io_files_for_renaming(top_input_dir_name, first_date_unix_sec,
                                last_date_unix_sec, top_output_dir_name):
    """Finds input and output files for renaming storms.

    N = number of dates

    :param top_input_dir_name: See documentation for `rename_storms`.
    :param first_date_unix_sec: Same.
    :param last_date_unix_sec: Same.
    :param top_output_dir_name: Same.
    :return: input_file_names_by_date: length-N list, where the [i]th item is a
        numpy array of paths to input files for the [i]th date.
    :return: output_file_names_by_date: Same as above, but for output files.
    :return: valid_times_by_date_unix_sec: Same as above, but for valid times.
        All 3 arrays for the [i]th date have the same length.
    """

    dates_unix_sec = time_periods.range_and_interval_to_list(
        start_time_unix_sec=first_date_unix_sec,
        end_time_unix_sec=last_date_unix_sec,
        time_interval_sec=DAYS_TO_SECONDS,
        include_endpoint=True)

    date_strings = [
        time_conversion.unix_sec_to_string(t, DATE_FORMAT)
        for t in dates_unix_sec
    ]

    num_dates = len(date_strings)
    input_file_names_by_date = [numpy.array([], dtype=object)] * num_dates
    output_file_names_by_date = [numpy.array([], dtype=object)] * num_dates
    valid_times_by_date_unix_sec = [numpy.array([], dtype=int)] * num_dates

    for i in range(num_dates):
        print('Finding input files for date {0:s}...'.format(date_strings[i]))

        these_input_file_names = tracking_io.find_files_one_spc_date(
            spc_date_string=date_strings[i],
            source_name=tracking_utils.PROBSEVERE_NAME,
            top_tracking_dir_name=top_input_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            raise_error_if_missing=True)[0]

        these_input_file_names.sort()
        these_valid_times_unix_sec = numpy.array(
            [tracking_io.file_name_to_time(f) for f in these_input_file_names],
            dtype=int)

        these_output_file_names = []
        for t in these_valid_times_unix_sec:
            these_output_file_names.append(
                tracking_io.find_file(
                    valid_time_unix_sec=t,
                    source_name=tracking_utils.PROBSEVERE_NAME,
                    top_tracking_dir_name=top_output_dir_name,
                    tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
                    raise_error_if_missing=False))

        input_file_names_by_date[i] = numpy.array(these_input_file_names,
                                                  dtype=object)
        output_file_names_by_date[i] = numpy.array(these_output_file_names,
                                                   dtype=object)
        valid_times_by_date_unix_sec[i] = these_valid_times_unix_sec

    print(SEPARATOR_STRING)

    return (input_file_names_by_date, output_file_names_by_date,
            valid_times_by_date_unix_sec)
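
# Hypothetical usage of the finder above (directory names are placeholders,
# and DATE_FORMAT is assumed to be '%Y%m%d').  Dates are converted to Unix
# seconds with the standard library instead of the project's time_conversion
# module:

import calendar
import time


def _date_string_to_unix_sec(date_string):
    """Converts date from 'yyyymmdd' string to Unix seconds at 0000 UTC."""

    return calendar.timegm(time.strptime(date_string, '%Y%m%d'))


(input_file_names_by_date,
 output_file_names_by_date,
 valid_times_by_date_unix_sec
) = _find_io_files_for_renaming(
    top_input_dir_name='probsevere_tracking/original',
    first_date_unix_sec=_date_string_to_unix_sec('20180101'),
    last_date_unix_sec=_date_string_to_unix_sec('20180103'),
    top_output_dir_name='probsevere_tracking/renamed')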