Example #1
def _run(top_tracking_dir_name, first_spc_date_string, last_spc_date_string,
         top_myrorss_dir_name, radar_field_name, output_file_name):
    """Evaluates a set of storm tracks.

    This is effectively the main method.

    :param top_tracking_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param top_myrorss_dir_name: Same.
    :param radar_field_name: Same.
    :param output_file_name: Same.
    """

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    list_of_storm_object_tables = []

    for this_spc_date_string in spc_date_strings:
        these_file_names = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=echo_top_tracking.DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

        if len(these_file_names) == 0:
            continue

        this_storm_object_table = tracking_io.read_many_files(
            these_file_names)[STORM_OBJECT_COLUMNS]

        list_of_storm_object_tables.append(this_storm_object_table)

        if this_spc_date_string != spc_date_strings[-1]:
            print(MINOR_SEPARATOR_STRING)

        if len(list_of_storm_object_tables) == 1:
            continue

        list_of_storm_object_tables[-1] = list_of_storm_object_tables[-1].align(
            list_of_storm_object_tables[0], axis=1)[0]

    print(SEPARATOR_STRING)

    storm_object_table = pandas.concat(list_of_storm_object_tables,
                                       axis=0,
                                       ignore_index=True)

    evaluation_dict = tracking_eval.evaluate_tracks(
        storm_object_table=storm_object_table,
        top_myrorss_dir_name=top_myrorss_dir_name,
        radar_field_name=radar_field_name)

    print('Writing results to: "{0:s}"...'.format(output_file_name))
    tracking_eval.write_file(evaluation_dict=evaluation_dict,
                             pickle_file_name=output_file_name)
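
The per-date loop above aligns each new table's columns to the first table before concatenating, so that pandas.concat never silently misorders columns. A minimal standalone sketch of that align-then-concat pattern, using toy column names rather than the library's:

import pandas

# Toy tables whose columns arrive in different orders.
table_a = pandas.DataFrame({'storm_id': ['a', 'b'], 'valid_time': [0, 300]})
table_b = pandas.DataFrame({'valid_time': [600, 900], 'storm_id': ['c', 'd']})

# align(..., axis=1) reconciles table_b's columns with table_a's; element [0]
# of the returned pair is the realigned calling table.
table_b = table_b.align(table_a, axis=1)[0]

# With consistent columns, the tables can be stacked into one.
full_table = pandas.concat([table_a, table_b], axis=0, ignore_index=True)
print(full_table)
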
Example #2
def _compute_radar_stats_from_gridrad(spc_date_string, top_tracking_dir_name,
                                      tracking_scale_metres2,
                                      top_gridrad_dir_name, output_dir_name):
    """Uses GridRad data to compute radar statistics for each storm object.

    :param spc_date_string: SPC (Storm Prediction Center) date in format
        "yyyymmdd".  Radar stats will be computed for all storm objects on this
        date.
    :param top_tracking_dir_name: Name of top-level directory with storm-
        tracking files.  Storm objects will be read from here.
    :param tracking_scale_metres2: Tracking scale (minimum storm area).  Will be
        used to find tracking files.
    :param top_gridrad_dir_name: Name of top-level directory with GridRad files.
    :param output_dir_name: Name of output directory.  A single Pickle file,
        with radar stats for each storm object, will be written here.
    """

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    storm_object_statistic_table = (
        radar_statistics.get_storm_based_radar_stats_gridrad(
            storm_object_table=storm_object_table,
            top_radar_dir_name=top_gridrad_dir_name))
    print(SEPARATOR_STRING)

    output_file_name = '{0:s}/radar_stats_for_storm_objects_{1:s}.p'.format(
        output_dir_name, spc_date_string)

    print('Writing radar statistics to file: "{0:s}"...'.format(
        output_file_name))

    radar_statistics.write_stats_for_storm_objects(
        storm_object_statistic_table, output_file_name)
Example #3
def _compute_shape_stats(spc_date_string, top_tracking_dir_name,
                         tracking_scale_metres2, output_dir_name):
    """Computes shape statistics for each storm object.

    :param spc_date_string: SPC (Storm Prediction Center) date in format
        "yyyymmdd".  Shape statistics will be computed for all storm objects on
        this date.
    :param top_tracking_dir_name: Name of top-level directory with storm-
        tracking data.
    :param tracking_scale_metres2: Tracking scale (minimum storm area).  Will be
        used to find input data.
    :param output_dir_name: Name of output directory.  A single Pickle file,
        with shape statistics for each storm object, will be written here.
    """

    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    shape_statistic_table = shape_stats.get_stats_for_storm_objects(
        storm_object_table)
    print(SEPARATOR_STRING)

    shape_statistic_file_name = '{0:s}/shape_statistics_{1:s}.p'.format(
        output_dir_name, spc_date_string)

    print('Writing shape statistics to: "{0:s}"...'.format(
        shape_statistic_file_name))

    shape_stats.write_stats_for_storm_objects(shape_statistic_table,
                                              shape_statistic_file_name)
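
shape_stats.write_stats_for_storm_objects presumably serializes the statistic table to the named Pickle file. A generic pandas round trip under that assumption, with a hypothetical path and toy columns:

import pandas

shape_statistic_table = pandas.DataFrame({
    'full_id_string': ['a', 'b'],
    'area_metres2': [1.5e7, 3.2e7],
})

# Hypothetical path; the script itself builds
# '{output_dir}/shape_statistics_{spc_date}.p'.
pickle_file_name = '/tmp/shape_statistics_20110427.p'

shape_statistic_table.to_pickle(pickle_file_name)
restored_table = pandas.read_pickle(pickle_file_name)
assert restored_table.equals(shape_statistic_table)
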
Example #4
def _interp_soundings(spc_date_string, lead_times_seconds,
                      lag_time_for_convective_contamination_sec,
                      top_ruc_directory_name, top_rap_directory_name,
                      top_tracking_dir_name, tracking_scale_metres2,
                      top_output_dir_name):
    """Interpolates NWP sounding to each storm object at each lead time.

    :param spc_date_string: See documentation at top of file.
    :param lead_times_seconds: Same.
    :param lag_time_for_convective_contamination_sec: Same.
    :param top_ruc_directory_name: Same.
    :param top_rap_directory_name: Same.
    :param top_tracking_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param top_output_dir_name: Same.
    :raises: ValueError: if model-initialization times needed are on opposite
        sides of 0000 UTC 1 May 2012 (the cutoff between RUC and RAP models).
    """

    lead_times_seconds = numpy.array(lead_times_seconds, dtype=int)

    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    first_storm_time_unix_sec = numpy.min(
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)
    last_storm_time_unix_sec = numpy.max(
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)

    first_init_time_unix_sec = number_rounding.floor_to_nearest(
        (first_storm_time_unix_sec + numpy.min(lead_times_seconds) -
         lag_time_for_convective_contamination_sec), HOURS_TO_SECONDS)

    last_init_time_unix_sec = number_rounding.floor_to_nearest(
        (last_storm_time_unix_sec + numpy.max(lead_times_seconds) -
         lag_time_for_convective_contamination_sec), HOURS_TO_SECONDS)

    extreme_init_times_unix_sec = numpy.array(
        [first_init_time_unix_sec, last_init_time_unix_sec], dtype=int)

    if numpy.all(extreme_init_times_unix_sec < FIRST_RAP_TIME_UNIX_SEC):
        top_grib_directory_name = top_ruc_directory_name
        model_name = nwp_model_utils.RUC_MODEL_NAME

    elif numpy.all(extreme_init_times_unix_sec >= FIRST_RAP_TIME_UNIX_SEC):
        top_grib_directory_name = top_rap_directory_name
        model_name = nwp_model_utils.RAP_MODEL_NAME

    else:
        first_storm_time_string = time_conversion.unix_sec_to_string(
            first_storm_time_unix_sec, STORM_TIME_FORMAT)
        last_storm_time_string = time_conversion.unix_sec_to_string(
            last_storm_time_unix_sec, STORM_TIME_FORMAT)
        first_init_time_string = time_conversion.unix_sec_to_string(
            first_init_time_unix_sec, MODEL_INIT_TIME_FORMAT)
        last_init_time_string = time_conversion.unix_sec_to_string(
            last_init_time_unix_sec, MODEL_INIT_TIME_FORMAT)

        error_string = (
            'First and last storm times are {0:s} and {1:s}.  Thus, first and '
            'last model-initialization times needed are {2:s} and {3:s}, which '
            'are on opposite sides of {4:s} (the cutoff between RUC and RAP '
            'models).  The code is not generalized enough to interp data from '
            'two different models.  Sorry, eh?').format(
                first_storm_time_string, last_storm_time_string,
                first_init_time_string, last_init_time_string,
                FIRST_RAP_TIME_STRING)

        raise ValueError(error_string)

    sounding_dict_by_lead_time = soundings.interp_soundings_to_storm_objects(
        storm_object_table=storm_object_table,
        top_grib_directory_name=top_grib_directory_name,
        model_name=model_name,
        use_all_grids=True,
        height_levels_m_agl=soundings.DEFAULT_HEIGHT_LEVELS_M_AGL,
        lead_times_seconds=lead_times_seconds,
        lag_time_for_convective_contamination_sec=
        lag_time_for_convective_contamination_sec,
        wgrib_exe_name=WGRIB_EXE_NAME,
        wgrib2_exe_name=WGRIB2_EXE_NAME,
        raise_error_if_missing=False)

    print(SEPARATOR_STRING)
    num_lead_times = len(lead_times_seconds)

    for k in range(num_lead_times):
        this_sounding_file_name = soundings.find_sounding_file(
            top_directory_name=top_output_dir_name,
            spc_date_string=spc_date_string,
            lead_time_seconds=lead_times_seconds[k],
            lag_time_for_convective_contamination_sec=
            lag_time_for_convective_contamination_sec,
            raise_error_if_missing=False)

        print(
            'Writing soundings to: "{0:s}"...'.format(this_sounding_file_name))

        soundings.write_soundings(
            netcdf_file_name=this_sounding_file_name,
            sounding_dict_height_coords=sounding_dict_by_lead_time[k],
            lead_time_seconds=lead_times_seconds[k],
            lag_time_for_convective_contamination_sec=
            lag_time_for_convective_contamination_sec)
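
The model-initialization window above comes from shifting each storm time by lead and lag, then flooring to the nearest hour. number_rounding.floor_to_nearest presumably behaves like this minimal numpy version:

import numpy

HOURS_TO_SECONDS = 3600


def floor_to_nearest(values, rounding_base):
    """Floors each value to the nearest multiple of `rounding_base`."""
    return numpy.floor(values / rounding_base) * rounding_base


# A storm valid at 01:23:45 UTC (5025 s) with a 900-s lead time and 1800-s
# lag maps to the 01:00:00 UTC initialization: 5025 + 900 - 1800 = 4125 s,
# floored to 3600 s.
print(floor_to_nearest(numpy.array([5025 + 900 - 1800]), HOURS_TO_SECONDS))
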
Example #5
def _run(top_orig_tracking_dir_name, top_new_tracking_dir_name,
         first_spc_date_string, last_spc_date_string, output_file_name):
    """Plots storms that were removed by remove_storms_outside_conus.py.

    This is effectively the main method.

    :param top_orig_tracking_dir_name: See documentation at top of file.
    :param top_new_tracking_dir_name: Same.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param output_file_name: Same.
    """

    file_system_utils.mkdir_recursive_if_necessary(file_name=output_file_name)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    orig_tracking_file_names = []

    for d in spc_date_strings:
        orig_tracking_file_names += tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_orig_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=d,
            raise_error_if_missing=False)[0]

    valid_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in orig_tracking_file_names],
        dtype=int)

    new_tracking_file_names = [
        tracking_io.find_file(
            top_tracking_dir_name=top_new_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            valid_time_unix_sec=t,
            spc_date_string=time_conversion.time_to_spc_date_string(t),
            raise_error_if_missing=True) for t in valid_times_unix_sec
    ]

    orig_storm_object_table = tracking_io.read_many_files(
        orig_tracking_file_names)
    print(SEPARATOR_STRING)

    new_storm_object_table = tracking_io.read_many_files(
        new_tracking_file_names)
    print(SEPARATOR_STRING)

    orig_storm_id_strings = (
        orig_storm_object_table[tracking_utils.FULL_ID_COLUMN].values.tolist())
    orig_storm_times_unix_sec = (
        orig_storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)
    new_storm_id_strings = (
        new_storm_object_table[tracking_utils.FULL_ID_COLUMN].values.tolist())
    new_storm_times_unix_sec = (
        new_storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)

    num_orig_storm_objects = len(orig_storm_object_table.index)
    orig_kept_flags = numpy.full(num_orig_storm_objects, False, dtype=bool)

    these_indices = tracking_utils.find_storm_objects(
        all_id_strings=orig_storm_id_strings,
        all_times_unix_sec=orig_storm_times_unix_sec,
        id_strings_to_keep=new_storm_id_strings,
        times_to_keep_unix_sec=new_storm_times_unix_sec,
        allow_missing=False)

    orig_kept_flags[these_indices] = True
    orig_removed_indices = numpy.where(numpy.invert(orig_kept_flags))[0]
    print('{0:d} of {1:d} storm objects were outside CONUS.'.format(
        len(orig_removed_indices), num_orig_storm_objects))

    removed_storm_object_table = orig_storm_object_table.iloc[
        orig_removed_indices]
    removed_latitudes_deg = removed_storm_object_table[
        tracking_utils.CENTROID_LATITUDE_COLUMN].values

    removed_longitudes_deg = removed_storm_object_table[
        tracking_utils.CENTROID_LONGITUDE_COLUMN].values

    figure_object, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=numpy.min(removed_latitudes_deg) - 1.,
            max_latitude_deg=numpy.max(removed_latitudes_deg) + 1.,
            min_longitude_deg=numpy.min(removed_longitudes_deg) - 1.,
            max_longitude_deg=numpy.max(removed_longitudes_deg) + 1.,
            resolution_string='i'))

    plotting_utils.plot_coastlines(basemap_object=basemap_object,
                                   axes_object=axes_object,
                                   line_colour=BORDER_COLOUR)
    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR)
    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR)
    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS)
    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS)

    conus_latitudes_deg, conus_longitudes_deg = (
        conus_boundary.read_from_netcdf())
    conus_latitudes_deg, conus_longitudes_deg = conus_boundary.erode_boundary(
        latitudes_deg=conus_latitudes_deg,
        longitudes_deg=conus_longitudes_deg,
        erosion_distance_metres=EROSION_DISTANCE_METRES)

    axes_object.plot(conus_longitudes_deg,
                     conus_latitudes_deg,
                     color=LINE_COLOUR,
                     linestyle='solid',
                     linewidth=LINE_WIDTH)
    axes_object.plot(removed_longitudes_deg,
                     removed_latitudes_deg,
                     linestyle='None',
                     marker=MARKER_TYPE,
                     markersize=MARKER_SIZE,
                     markeredgewidth=0,
                     markerfacecolor=MARKER_COLOUR,
                     markeredgecolor=MARKER_COLOUR)

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    figure_object.savefig(output_file_name,
                          dpi=FIGURE_RESOLUTION_DPI,
                          pad_inches=0,
                          bbox_inches='tight')
    pyplot.close(figure_object)
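
The removed storm objects are found by flagging every original object that survives into the new (inside-CONUS) tables, then inverting the flags. The same numpy idiom in isolation:

import numpy

num_orig_storm_objects = 6
kept_indices = numpy.array([0, 2, 3], dtype=int)  # e.g., matches found

orig_kept_flags = numpy.full(num_orig_storm_objects, False, dtype=bool)
orig_kept_flags[kept_indices] = True

# Indices of objects with no match in the filtered tables.
removed_indices = numpy.where(numpy.invert(orig_kept_flags))[0]
print(removed_indices)  # [1 4 5]
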
Example #6
def _run(top_tracking_dir_name, first_spc_date_string, last_spc_date_string,
         colour_map_name, min_plot_latitude_deg, max_plot_latitude_deg,
         min_plot_longitude_deg, max_plot_longitude_deg, output_file_name):
    """Plots storm tracks for a continuous time period.

    This is effectively the main method.

    :param top_tracking_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param colour_map_name: Same.
    :param min_plot_latitude_deg: Same.
    :param max_plot_latitude_deg: Same.
    :param min_plot_longitude_deg: Same.
    :param max_plot_longitude_deg: Same.
    :param output_file_name: Same.
    """

    if colour_map_name in ['', 'None']:
        colour_map_object = 'random'
    else:
        colour_map_object = pyplot.cm.get_cmap(colour_map_name)

    if min_plot_latitude_deg <= SENTINEL_VALUE:
        min_plot_latitude_deg = None
    if max_plot_latitude_deg <= SENTINEL_VALUE:
        max_plot_latitude_deg = None
    if min_plot_longitude_deg <= SENTINEL_VALUE:
        min_plot_longitude_deg = None
    if max_plot_longitude_deg <= SENTINEL_VALUE:
        max_plot_longitude_deg = None

    file_system_utils.mkdir_recursive_if_necessary(file_name=output_file_name)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    list_of_storm_object_tables = []

    for this_spc_date_string in spc_date_strings:
        these_file_names = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=echo_top_tracking.DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

        if len(these_file_names) == 0:
            continue

        this_storm_object_table = tracking_io.read_many_files(
            these_file_names)[REQUIRED_COLUMNS]

        list_of_storm_object_tables.append(this_storm_object_table)

        if this_spc_date_string != spc_date_strings[-1]:
            print(MINOR_SEPARATOR_STRING)

        if len(list_of_storm_object_tables) == 1:
            continue

        list_of_storm_object_tables[-1] = list_of_storm_object_tables[-1].align(
            list_of_storm_object_tables[0], axis=1)[0]

    print(SEPARATOR_STRING)
    storm_object_table = pandas.concat(list_of_storm_object_tables,
                                       axis=0,
                                       ignore_index=True)

    if min_plot_latitude_deg is None:
        min_plot_latitude_deg = numpy.min(
            storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values
        ) - LATLNG_BUFFER_DEG

    if max_plot_latitude_deg is None:
        max_plot_latitude_deg = numpy.max(
            storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values
        ) + LATLNG_BUFFER_DEG

    if min_plot_longitude_deg is None:
        min_plot_longitude_deg = numpy.min(
            storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values
        ) - LATLNG_BUFFER_DEG

    if max_plot_longitude_deg is None:
        max_plot_longitude_deg = numpy.max(
            storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values
        ) + LATLNG_BUFFER_DEG

    _, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg,
            resolution_string='i'))

    plotting_utils.plot_coastlines(basemap_object=basemap_object,
                                   axes_object=axes_object,
                                   line_colour=BORDER_COLOUR)

    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR)

    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR)

    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS)

    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS)

    storm_plotting.plot_storm_tracks(storm_object_table=storm_object_table,
                                     axes_object=axes_object,
                                     basemap_object=basemap_object,
                                     colour_map_object=colour_map_object)

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    pyplot.savefig(output_file_name, dpi=FIGURE_RESOLUTION_DPI)
    pyplot.close()
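
The plotting limits use a sentinel so that a flag left at its default means "compute this bound from the data". A sketch of that convention, assuming SENTINEL_VALUE is a large negative number and LATLNG_BUFFER_DEG a small padding (both names are defined at the top of the real script):

import numpy

SENTINEL_VALUE = -9999.
LATLNG_BUFFER_DEG = 0.5


def resolve_limit(user_value, data_values_deg, is_minimum):
    """Returns the user-specified limit, or a data-driven one if sentinel."""
    if user_value <= SENTINEL_VALUE:
        user_value = None

    if user_value is not None:
        return user_value
    if is_minimum:
        return numpy.min(data_values_deg) - LATLNG_BUFFER_DEG
    return numpy.max(data_values_deg) + LATLNG_BUFFER_DEG


latitudes_deg = numpy.array([30.1, 34.7, 38.2])
print(resolve_limit(-9999., latitudes_deg, is_minimum=True))  # 29.6
print(resolve_limit(25., latitudes_deg, is_minimum=True))     # 25.0
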
Example #7
def _run(top_tracking_dir_name, first_spc_date_string, last_spc_date_string,
         colour_map_name, min_plot_latitude_deg, max_plot_latitude_deg,
         min_plot_longitude_deg, max_plot_longitude_deg, output_file_name):
    """Plots storm tracks for a continuous time period.

    This is effectively the main method.

    :param top_tracking_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param colour_map_name: Same.
    :param min_plot_latitude_deg: Same.
    :param max_plot_latitude_deg: Same.
    :param min_plot_longitude_deg: Same.
    :param max_plot_longitude_deg: Same.
    :param output_file_name: Same.
    """

    if colour_map_name in ['', 'None']:
        colour_map_object = 'random'
    else:
        colour_map_object = pyplot.cm.get_cmap(colour_map_name)

    if min_plot_latitude_deg <= SENTINEL_VALUE:
        min_plot_latitude_deg = None
    if max_plot_latitude_deg <= SENTINEL_VALUE:
        max_plot_latitude_deg = None
    if min_plot_longitude_deg <= SENTINEL_VALUE:
        min_plot_longitude_deg = None
    if max_plot_longitude_deg <= SENTINEL_VALUE:
        max_plot_longitude_deg = None

    file_system_utils.mkdir_recursive_if_necessary(file_name=output_file_name)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    list_of_storm_object_tables = []

    for this_spc_date_string in spc_date_strings:
        these_file_names = tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=echo_top_tracking.DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

        if len(these_file_names) == 0:
            continue

        this_storm_object_table = tracking_io.read_many_files(
            these_file_names)[REQUIRED_COLUMNS]

        list_of_storm_object_tables.append(this_storm_object_table)

        if this_spc_date_string != spc_date_strings[-1]:
            print(MINOR_SEPARATOR_STRING)

        if len(list_of_storm_object_tables) == 1:
            continue

        list_of_storm_object_tables[-1] = list_of_storm_object_tables[-1].align(
            list_of_storm_object_tables[0], axis=1)[0]

    print(SEPARATOR_STRING)
    storm_object_table = pandas.concat(list_of_storm_object_tables,
                                       axis=0,
                                       ignore_index=True)

    # TODO(thunderhoser): HACK
    first_time_unix_sec = time_conversion.string_to_unix_sec(
        '2011-04-27-20', '%Y-%m-%d-%H')
    storm_object_table = storm_object_table.loc[storm_object_table[
        tracking_utils.VALID_TIME_COLUMN] >= first_time_unix_sec]

    if min_plot_latitude_deg is None:
        min_plot_latitude_deg = numpy.min(
            storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values
        ) - LATLNG_BUFFER_DEG

    if max_plot_latitude_deg is None:
        max_plot_latitude_deg = numpy.max(
            storm_object_table[tracking_utils.CENTROID_LATITUDE_COLUMN].values
        ) + LATLNG_BUFFER_DEG

    if min_plot_longitude_deg is None:
        min_plot_longitude_deg = numpy.min(
            storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values
        ) - LATLNG_BUFFER_DEG

    if max_plot_longitude_deg is None:
        max_plot_longitude_deg = numpy.max(
            storm_object_table[tracking_utils.CENTROID_LONGITUDE_COLUMN].values
        ) + LATLNG_BUFFER_DEG

    _, axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg,
            resolution_string='i'))

    # plotting_utils.plot_coastlines(
    #     basemap_object=basemap_object, axes_object=axes_object,
    #     line_colour=BORDER_COLOUR
    # )
    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR)
    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR)
    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS,
                                  line_colour=numpy.full(3, 1.))
    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS,
                                  line_colour=numpy.full(3, 1.))

    colour_bar_object = storm_plotting.plot_storm_tracks(
        storm_object_table=storm_object_table,
        axes_object=axes_object,
        basemap_object=basemap_object,
        colour_map_object=colour_map_object)

    valid_times_unix_sec = (
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)

    # TODO(thunderhoser): HACK
    tick_times_unix_sec = time_periods.range_and_interval_to_list(
        start_time_unix_sec=numpy.min(valid_times_unix_sec),
        end_time_unix_sec=numpy.max(valid_times_unix_sec),
        time_interval_sec=1800,
        include_endpoint=True)
    tick_time_strings = [
        time_conversion.unix_sec_to_string(t, COLOUR_BAR_TIME_FORMAT)
        for t in tick_times_unix_sec
    ]

    colour_bar_object.set_ticks(tick_times_unix_sec)
    colour_bar_object.set_ticklabels(tick_time_strings)

    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    pyplot.savefig(output_file_name,
                   dpi=FIGURE_RESOLUTION_DPI,
                   pad_inches=0,
                   bbox_inches='tight')
    pyplot.close()
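
The colour-bar ticks above are valid times at a fixed interval, relabelled as strings. time_periods.range_and_interval_to_list presumably yields the same endpoint-inclusive sequence as this numpy/time sketch (the tick format here is assumed, not the script's actual COLOUR_BAR_TIME_FORMAT):

import time

import numpy

start_time_unix_sec = 1303934400  # 2011-04-27 20:00 UTC
end_time_unix_sec = 1303938000    # 2011-04-27 21:00 UTC

# Endpoint-inclusive ticks every 1800 s.
tick_times_unix_sec = numpy.arange(
    start_time_unix_sec, end_time_unix_sec + 1, 1800, dtype=int)

tick_time_strings = [
    time.strftime('%H%M UTC', time.gmtime(t)) for t in tick_times_unix_sec
]
print(tick_time_strings)  # ['2000 UTC', '2030 UTC', '2100 UTC']
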
Example #8
def _extract_storm_images(
        num_image_rows, num_image_columns, rotate_grids,
        rotated_grid_spacing_metres, radar_field_names, radar_heights_m_agl,
        spc_date_string, top_radar_dir_name, top_tracking_dir_name,
        elevation_dir_name, tracking_scale_metres2, target_name,
        top_target_dir_name, top_output_dir_name):
    """Extracts storm-centered radar images from GridRad data.

    :param num_image_rows: See documentation at top of file.
    :param num_image_columns: Same.
    :param rotate_grids: Same.
    :param rotated_grid_spacing_metres: Same.
    :param radar_field_names: Same.
    :param radar_heights_m_agl: Same.
    :param spc_date_string: Same.
    :param top_radar_dir_name: Same.
    :param top_tracking_dir_name: Same.
    :param elevation_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param target_name: Same.
    :param top_target_dir_name: Same.
    :param top_output_dir_name: Same.
    """

    if target_name in ['', 'None']:
        target_name = None

    if target_name is not None:
        target_param_dict = target_val_utils.target_name_to_params(target_name)

        target_file_name = target_val_utils.find_target_file(
            top_directory_name=top_target_dir_name,
            event_type_string=target_param_dict[
                target_val_utils.EVENT_TYPE_KEY],
            spc_date_string=spc_date_string)

        print('Reading data from: "{0:s}"...'.format(target_file_name))
        target_dict = target_val_utils.read_target_values(
            netcdf_file_name=target_file_name, target_names=[target_name]
        )

        print('\n')

    # Find storm objects on the given SPC date.
    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2
    )[0]

    # Read storm objects on the given SPC date.
    storm_object_table = tracking_io.read_many_files(
        tracking_file_names
    )[storm_images.STORM_COLUMNS_NEEDED]

    print(SEPARATOR_STRING)

    if target_name is not None:
        print((
            'Removing storm objects without target values (variable = '
            '"{0:s}")...'
        ).format(target_name))

        these_indices = tracking_utils.find_storm_objects(
            all_id_strings=storm_object_table[
                tracking_utils.FULL_ID_COLUMN].values.tolist(),
            all_times_unix_sec=storm_object_table[
                tracking_utils.VALID_TIME_COLUMN].values.astype(int),
            id_strings_to_keep=target_dict[target_val_utils.FULL_IDS_KEY],
            times_to_keep_unix_sec=target_dict[
                target_val_utils.VALID_TIMES_KEY],
            allow_missing=False)

        num_storm_objects_orig = len(storm_object_table.index)
        storm_object_table = storm_object_table.iloc[these_indices]
        num_storm_objects = len(storm_object_table.index)

        print('Removed {0:d} of {1:d} storm objects!\n'.format(
            num_storm_objects_orig - num_storm_objects, num_storm_objects_orig
        ))

    # Extract storm-centered radar images.
    storm_images.extract_storm_images_gridrad(
        storm_object_table=storm_object_table,
        top_radar_dir_name=top_radar_dir_name,
        top_output_dir_name=top_output_dir_name,
        elevation_dir_name=elevation_dir_name,
        num_storm_image_rows=num_image_rows,
        num_storm_image_columns=num_image_columns, rotate_grids=rotate_grids,
        rotated_grid_spacing_metres=rotated_grid_spacing_metres,
        radar_field_names=radar_field_names,
        radar_heights_m_agl=radar_heights_m_agl)
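
tracking_utils.find_storm_objects matches each (full ID, valid time) pair in one set of arrays against another. A dictionary-based sketch of that matching, with -1 marking a pair that is absent when allow_missing=True (the real function's internals may differ):

import numpy


def find_storm_objects(all_id_strings, all_times_unix_sec,
                       id_strings_to_keep, times_to_keep_unix_sec,
                       allow_missing=False):
    """For each pair to keep, returns its index in the full lists."""
    index_by_pair = {
        (s, t): i
        for i, (s, t) in enumerate(zip(all_id_strings, all_times_unix_sec))
    }

    indices = []
    for s, t in zip(id_strings_to_keep, times_to_keep_unix_sec):
        if (s, t) in index_by_pair:
            indices.append(index_by_pair[(s, t)])
        elif allow_missing:
            indices.append(-1)
        else:
            raise ValueError(
                'Cannot find storm object "{0:s}" at {1:d}.'.format(s, t))

    return numpy.array(indices, dtype=int)


print(find_storm_objects(
    ['a', 'b', 'c'], [0, 0, 300], ['c', 'a'], [300, 0]))  # [2 0]
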
Example #9
def _run(top_tracking_dir_name, first_spc_date_string, last_spc_date_string,
         storm_colour, storm_opacity, include_secondary_ids,
         min_plot_latitude_deg, max_plot_latitude_deg, min_plot_longitude_deg,
         max_plot_longitude_deg, top_myrorss_dir_name, radar_field_name,
         radar_height_m_asl, output_dir_name):
    """Plots storm outlines (along with IDs) at each time step.

    This is effectively the main method.

    :param top_tracking_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param storm_colour: Same.
    :param storm_opacity: Same.
    :param include_secondary_ids: Same.
    :param min_plot_latitude_deg: Same.
    :param max_plot_latitude_deg: Same.
    :param min_plot_longitude_deg: Same.
    :param max_plot_longitude_deg: Same.
    :param top_myrorss_dir_name: Same.
    :param radar_field_name: Same.
    :param radar_height_m_asl: Same.
    :param output_dir_name: Same.
    """

    if top_myrorss_dir_name in ['', 'None']:
        top_myrorss_dir_name = None

    if radar_field_name != radar_utils.REFL_NAME:
        radar_height_m_asl = None

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    spc_date_strings = time_conversion.get_spc_dates_in_range(
        first_spc_date_string=first_spc_date_string,
        last_spc_date_string=last_spc_date_string)

    tracking_file_names = []

    for this_spc_date_string in spc_date_strings:
        tracking_file_names += (tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=DUMMY_SOURCE_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0])

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    latitude_limits_deg, longitude_limits_deg = _get_plotting_limits(
        min_plot_latitude_deg=min_plot_latitude_deg,
        max_plot_latitude_deg=max_plot_latitude_deg,
        min_plot_longitude_deg=min_plot_longitude_deg,
        max_plot_longitude_deg=max_plot_longitude_deg,
        storm_object_table=storm_object_table)

    min_plot_latitude_deg = latitude_limits_deg[0]
    max_plot_latitude_deg = latitude_limits_deg[1]
    min_plot_longitude_deg = longitude_limits_deg[0]
    max_plot_longitude_deg = longitude_limits_deg[1]

    valid_times_unix_sec = numpy.unique(
        storm_object_table[tracking_utils.VALID_TIME_COLUMN].values)
    num_times = len(valid_times_unix_sec)

    for i in range(num_times):
        these_current_rows = numpy.where(
            storm_object_table[tracking_utils.VALID_TIME_COLUMN].values ==
            valid_times_unix_sec[i])[0]

        these_current_subrows = _filter_storm_objects_latlng(
            storm_object_table=storm_object_table.iloc[these_current_rows],
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg)

        if len(these_current_subrows) == 0:
            continue

        these_current_rows = these_current_rows[these_current_subrows]

        this_storm_object_table = _find_relevant_storm_objects(
            storm_object_table=storm_object_table,
            current_rows=these_current_rows)

        these_latlng_rows = _filter_storm_objects_latlng(
            storm_object_table=this_storm_object_table,
            min_latitude_deg=min_plot_latitude_deg,
            max_latitude_deg=max_plot_latitude_deg,
            min_longitude_deg=min_plot_longitude_deg,
            max_longitude_deg=max_plot_longitude_deg)

        if top_myrorss_dir_name is None:
            this_radar_matrix = None
            these_radar_latitudes_deg = None
            these_radar_longitudes_deg = None
        else:
            this_myrorss_file_name = myrorss_and_mrms_io.find_raw_file(
                top_directory_name=top_myrorss_dir_name,
                unix_time_sec=valid_times_unix_sec[i],
                spc_date_string=time_conversion.time_to_spc_date_string(
                    valid_times_unix_sec[i]),
                field_name=radar_field_name,
                data_source=radar_utils.MYRORSS_SOURCE_ID,
                height_m_asl=radar_height_m_asl,
                raise_error_if_missing=True)

            print(
                'Reading data from: "{0:s}"...'.format(this_myrorss_file_name))

            this_metadata_dict = (
                myrorss_and_mrms_io.read_metadata_from_raw_file(
                    netcdf_file_name=this_myrorss_file_name,
                    data_source=radar_utils.MYRORSS_SOURCE_ID))

            this_sparse_grid_table = (
                myrorss_and_mrms_io.read_data_from_sparse_grid_file(
                    netcdf_file_name=this_myrorss_file_name,
                    field_name_orig=this_metadata_dict[
                        myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
                    data_source=radar_utils.MYRORSS_SOURCE_ID,
                    sentinel_values=this_metadata_dict[
                        radar_utils.SENTINEL_VALUE_COLUMN]))

            (this_radar_matrix, these_radar_latitudes_deg,
             these_radar_longitudes_deg) = radar_s2f.sparse_to_full_grid(
                 sparse_grid_table=this_sparse_grid_table,
                 metadata_dict=this_metadata_dict)

            this_radar_matrix = numpy.flipud(this_radar_matrix)
            these_radar_latitudes_deg = these_radar_latitudes_deg[::-1]

        _, this_axes_object, this_basemap_object = (
            plotting_utils.create_equidist_cylindrical_map(
                min_latitude_deg=min_plot_latitude_deg,
                max_latitude_deg=max_plot_latitude_deg,
                min_longitude_deg=min_plot_longitude_deg,
                max_longitude_deg=max_plot_longitude_deg,
                resolution_string='i'))

        _plot_storm_outlines_one_time(
            storm_object_table=this_storm_object_table.iloc[these_latlng_rows],
            valid_time_unix_sec=valid_times_unix_sec[i],
            axes_object=this_axes_object,
            basemap_object=this_basemap_object,
            storm_colour=storm_colour,
            storm_opacity=storm_opacity,
            include_secondary_ids=include_secondary_ids,
            output_dir_name=output_dir_name,
            radar_matrix=this_radar_matrix,
            radar_field_name=radar_field_name,
            radar_latitudes_deg=these_radar_latitudes_deg,
            radar_longitudes_deg=these_radar_longitudes_deg)
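
Each map frame above covers one valid time, so the table is split with numpy.unique and a boolean match per time. A toy version of that grouping:

import numpy
import pandas

storm_object_table = pandas.DataFrame({
    'valid_time_unix_sec': [0, 0, 300, 300, 300],
    'full_id_string': ['a', 'b', 'a', 'b', 'c'],
})

valid_times_unix_sec = numpy.unique(
    storm_object_table['valid_time_unix_sec'].values)

for this_time_unix_sec in valid_times_unix_sec:
    these_rows = numpy.where(
        storm_object_table['valid_time_unix_sec'].values == this_time_unix_sec
    )[0]

    # One map frame would be drawn from this subset.
    print(this_time_unix_sec, storm_object_table.iloc[these_rows][
        'full_id_string'].values.tolist())
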
Example #10
def _plot_one_example(full_id_string,
                      storm_time_unix_sec,
                      target_name,
                      forecast_probability,
                      tornado_dir_name,
                      top_tracking_dir_name,
                      top_myrorss_dir_name,
                      radar_field_name,
                      radar_height_m_asl,
                      latitude_buffer_deg,
                      longitude_buffer_deg,
                      top_output_dir_name,
                      aux_forecast_probabilities=None,
                      aux_activation_dict=None):
    """Plots one example with surrounding context at several times.

    N = number of storm objects read from auxiliary activation file

    :param full_id_string: Full storm ID.
    :param storm_time_unix_sec: Storm time.
    :param target_name: Name of target variable.
    :param forecast_probability: Forecast tornado probability for this example.
    :param tornado_dir_name: See documentation at top of file.
    :param top_tracking_dir_name: Same.
    :param top_myrorss_dir_name: Same.
    :param radar_field_name: Same.
    :param radar_height_m_asl: Same.
    :param latitude_buffer_deg: Same.
    :param longitude_buffer_deg: Same.
    :param top_output_dir_name: Same.
    :param aux_forecast_probabilities: length-N numpy array of forecast
        probabilities.  If this is None, will not plot forecast probs in maps.
    :param aux_activation_dict: Dictionary returned by
        `model_activation.read_file` from auxiliary file.  If this is None, will
        not plot forecast probs in maps.
    """

    storm_time_string = time_conversion.unix_sec_to_string(
        storm_time_unix_sec, TIME_FORMAT)

    # Create output directory for this example.
    output_dir_name = '{0:s}/{1:s}_{2:s}'.format(top_output_dir_name,
                                                 full_id_string,
                                                 storm_time_string)
    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    # Find tracking files.
    tracking_file_names = _find_tracking_files_one_example(
        valid_time_unix_sec=storm_time_unix_sec,
        top_tracking_dir_name=top_tracking_dir_name,
        target_name=target_name)

    tracking_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in tracking_file_names],
        dtype=int)

    tracking_time_strings = [
        time_conversion.unix_sec_to_string(t, TIME_FORMAT)
        for t in tracking_times_unix_sec
    ]

    # Read tracking files.
    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print('\n')

    if aux_activation_dict is not None:
        these_indices = tracking_utils.find_storm_objects(
            all_id_strings=aux_activation_dict[model_activation.FULL_IDS_KEY],
            all_times_unix_sec=aux_activation_dict[
                model_activation.STORM_TIMES_KEY],
            id_strings_to_keep=storm_object_table[
                tracking_utils.FULL_ID_COLUMN].values.tolist(),
            times_to_keep_unix_sec=storm_object_table[
                tracking_utils.VALID_TIME_COLUMN].values,
            allow_missing=True)

        storm_object_probs = numpy.array([
            aux_forecast_probabilities[k] if k >= 0 else numpy.nan
            for k in these_indices
        ])

        storm_object_table = storm_object_table.assign(
            **{FORECAST_PROBABILITY_COLUMN: storm_object_probs})

    primary_id_string = temporal_tracking.full_to_partial_ids(
        [full_id_string])[0][0]

    this_storm_object_table = storm_object_table.loc[storm_object_table[
        tracking_utils.PRIMARY_ID_COLUMN] == primary_id_string]

    latitude_limits_deg, longitude_limits_deg = _get_plotting_limits(
        storm_object_table=this_storm_object_table,
        latitude_buffer_deg=latitude_buffer_deg,
        longitude_buffer_deg=longitude_buffer_deg)

    storm_min_latitudes_deg = numpy.array([
        numpy.min(numpy.array(p.exterior.xy[1])) for p in storm_object_table[
            tracking_utils.LATLNG_POLYGON_COLUMN].values
    ])

    storm_max_latitudes_deg = numpy.array([
        numpy.max(numpy.array(p.exterior.xy[1])) for p in storm_object_table[
            tracking_utils.LATLNG_POLYGON_COLUMN].values
    ])

    storm_min_longitudes_deg = numpy.array([
        numpy.min(numpy.array(p.exterior.xy[0])) for p in storm_object_table[
            tracking_utils.LATLNG_POLYGON_COLUMN].values
    ])

    storm_max_longitudes_deg = numpy.array([
        numpy.max(numpy.array(p.exterior.xy[0])) for p in storm_object_table[
            tracking_utils.LATLNG_POLYGON_COLUMN].values
    ])

    min_latitude_flags = numpy.logical_and(
        storm_min_latitudes_deg >= latitude_limits_deg[0],
        storm_min_latitudes_deg <= latitude_limits_deg[1])

    max_latitude_flags = numpy.logical_and(
        storm_max_latitudes_deg >= latitude_limits_deg[0],
        storm_max_latitudes_deg <= latitude_limits_deg[1])

    latitude_flags = numpy.logical_or(min_latitude_flags, max_latitude_flags)

    min_longitude_flags = numpy.logical_and(
        storm_min_longitudes_deg >= longitude_limits_deg[0],
        storm_min_longitudes_deg <= longitude_limits_deg[1])

    max_longitude_flags = numpy.logical_and(
        storm_max_longitudes_deg >= longitude_limits_deg[0],
        storm_max_longitudes_deg <= longitude_limits_deg[1])

    longitude_flags = numpy.logical_or(min_longitude_flags,
                                       max_longitude_flags)
    good_indices = numpy.where(
        numpy.logical_and(latitude_flags, longitude_flags))[0]

    storm_object_table = storm_object_table.iloc[good_indices]

    # Read tornado reports.
    target_param_dict = target_val_utils.target_name_to_params(target_name)
    min_lead_time_seconds = target_param_dict[
        target_val_utils.MIN_LEAD_TIME_KEY]
    max_lead_time_seconds = target_param_dict[
        target_val_utils.MAX_LEAD_TIME_KEY]

    tornado_table = linkage._read_input_tornado_reports(
        input_directory_name=tornado_dir_name,
        storm_times_unix_sec=numpy.array([storm_time_unix_sec], dtype=int),
        max_time_before_storm_start_sec=-1 * min_lead_time_seconds,
        max_time_after_storm_end_sec=max_lead_time_seconds,
        genesis_only=True)

    tornado_table = tornado_table.loc[
        (tornado_table[linkage.EVENT_LATITUDE_COLUMN] >=
         latitude_limits_deg[0]) &
        (tornado_table[linkage.EVENT_LATITUDE_COLUMN] <=
         latitude_limits_deg[1])
    ]

    tornado_table = tornado_table.loc[
        (tornado_table[linkage.EVENT_LONGITUDE_COLUMN] >=
         longitude_limits_deg[0]) &
        (tornado_table[linkage.EVENT_LONGITUDE_COLUMN] <=
         longitude_limits_deg[1])
    ]

    for i in range(len(tracking_file_names)):
        this_storm_object_table = storm_object_table.loc[storm_object_table[
            tracking_utils.VALID_TIME_COLUMN] == tracking_times_unix_sec[i]]

        _plot_one_example_one_time(
            storm_object_table=this_storm_object_table,
            full_id_string=full_id_string,
            valid_time_unix_sec=tracking_times_unix_sec[i],
            tornado_table=copy.deepcopy(tornado_table),
            top_myrorss_dir_name=top_myrorss_dir_name,
            radar_field_name=radar_field_name,
            radar_height_m_asl=radar_height_m_asl,
            latitude_limits_deg=latitude_limits_deg,
            longitude_limits_deg=longitude_limits_deg)

        if aux_activation_dict is None:
            this_title_string = (
                'Valid time = {0:s} ... forecast prob at {1:s} = {2:.3f}'
            ).format(tracking_time_strings[i], storm_time_string,
                     forecast_probability)

            pyplot.title(this_title_string, fontsize=TITLE_FONT_SIZE)

        this_file_name = '{0:s}/{1:s}.jpg'.format(output_dir_name,
                                                  tracking_time_strings[i])

        print('Saving figure to file: "{0:s}"...\n'.format(this_file_name))
        pyplot.savefig(this_file_name, dpi=FIGURE_RESOLUTION_DPI)
        pyplot.close()

        imagemagick_utils.trim_whitespace(input_file_name=this_file_name,
                                          output_file_name=this_file_name)
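
The lat/long filter above keeps a storm object if either extreme of its outline polygon falls inside the plotting box. With shapely installed (it supplies the .exterior.xy accessor used above), the per-polygon extremes reduce to:

import numpy
from shapely.geometry import Polygon

# Toy outline in (longitude, latitude) vertex order, matching the convention
# implied above, where exterior.xy[0] is x = longitude.
polygon = Polygon(
    [(262.1, 35.0), (262.3, 35.0), (262.3, 35.2), (262.1, 35.2)])

latitudes_deg = numpy.array(polygon.exterior.xy[1])
latitude_limits_deg = numpy.array([34.5, 36.5])

min_latitude_flag = numpy.logical_and(
    numpy.min(latitudes_deg) >= latitude_limits_deg[0],
    numpy.min(latitudes_deg) <= latitude_limits_deg[1])
max_latitude_flag = numpy.logical_and(
    numpy.max(latitudes_deg) >= latitude_limits_deg[0],
    numpy.max(latitudes_deg) <= latitude_limits_deg[1])

print(numpy.logical_or(min_latitude_flag, max_latitude_flag))  # True
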
Example #11
def _extract_storm_images(num_image_rows, num_image_columns, rotate_grids,
                          rotated_grid_spacing_metres, radar_field_names,
                          refl_heights_m_agl, spc_date_string,
                          tarred_myrorss_dir_name, untarred_myrorss_dir_name,
                          top_tracking_dir_name, elevation_dir_name,
                          tracking_scale_metres2, target_name,
                          top_target_dir_name, top_output_dir_name):
    """Extracts storm-centered img for each field/height pair and storm object.

    :param num_image_rows: See documentation at top of file.
    :param num_image_columns: Same.
    :param rotate_grids: Same.
    :param rotated_grid_spacing_metres: Same.
    :param radar_field_names: Same.
    :param refl_heights_m_agl: Same.
    :param spc_date_string: Same.
    :param tarred_myrorss_dir_name: Same.
    :param untarred_myrorss_dir_name: Same.
    :param top_tracking_dir_name: Same.
    :param elevation_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param target_name: Same.
    :param top_target_dir_name: Same.
    :param top_output_dir_name: Same.
    """

    if target_name in ['', 'None']:
        target_name = None

    if target_name is not None:
        target_param_dict = target_val_utils.target_name_to_params(target_name)

        target_file_name = target_val_utils.find_target_file(
            top_directory_name=top_target_dir_name,
            event_type_string=target_param_dict[
                target_val_utils.EVENT_TYPE_KEY],
            spc_date_string=spc_date_string)

        print('Reading data from: "{0:s}"...'.format(target_file_name))
        target_dict = target_val_utils.read_target_values(
            netcdf_file_name=target_file_name, target_names=[target_name])
        print('\n')

    refl_heights_m_asl = radar_utils.get_valid_heights(
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_utils.REFL_NAME)

    # Untar files with azimuthal shear.
    az_shear_field_names = list(
        set(radar_field_names) & set(ALL_AZ_SHEAR_FIELD_NAMES))

    if len(az_shear_field_names) > 0:
        az_shear_tar_file_name = (
            '{0:s}/{1:s}/azimuthal_shear_only/{2:s}.tar').format(
                tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string)

        myrorss_io.unzip_1day_tar_file(
            tar_file_name=az_shear_tar_file_name,
            field_names=az_shear_field_names,
            spc_date_string=spc_date_string,
            top_target_directory_name=untarred_myrorss_dir_name)
        print(SEPARATOR_STRING)

    # Untar files with other radar fields.
    non_shear_field_names = list(
        set(radar_field_names) - set(ALL_AZ_SHEAR_FIELD_NAMES))

    if len(non_shear_field_names) > 0:
        non_shear_tar_file_name = '{0:s}/{1:s}/{2:s}.tar'.format(
            tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string)

        myrorss_io.unzip_1day_tar_file(
            tar_file_name=non_shear_tar_file_name,
            field_names=non_shear_field_names,
            spc_date_string=spc_date_string,
            top_target_directory_name=untarred_myrorss_dir_name,
            refl_heights_m_asl=refl_heights_m_asl)
        print(SEPARATOR_STRING)

    # Read storm tracks for the given SPC date.
    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    storm_object_table = tracking_io.read_many_files(tracking_file_names)[
        storm_images.STORM_COLUMNS_NEEDED]
    print(SEPARATOR_STRING)

    if target_name is not None:
        print(('Removing storm objects without target values (variable = '
               '"{0:s}")...').format(target_name))

        these_indices = tracking_utils.find_storm_objects(
            all_id_strings=storm_object_table[
                tracking_utils.FULL_ID_COLUMN].values.tolist(),
            all_times_unix_sec=storm_object_table[
                tracking_utils.VALID_TIME_COLUMN].values.astype(int),
            id_strings_to_keep=target_dict[target_val_utils.FULL_IDS_KEY],
            times_to_keep_unix_sec=target_dict[
                target_val_utils.VALID_TIMES_KEY],
            allow_missing=False)

        num_storm_objects_orig = len(storm_object_table.index)
        storm_object_table = storm_object_table.iloc[these_indices]
        num_storm_objects = len(storm_object_table.index)

        print('Removed {0:d} of {1:d} storm objects!\n'.format(
            num_storm_objects_orig - num_storm_objects,
            num_storm_objects_orig))

    # Extract storm-centered radar images.
    storm_images.extract_storm_images_myrorss_or_mrms(
        storm_object_table=storm_object_table,
        radar_source=radar_utils.MYRORSS_SOURCE_ID,
        top_radar_dir_name=untarred_myrorss_dir_name,
        top_output_dir_name=top_output_dir_name,
        elevation_dir_name=elevation_dir_name,
        num_storm_image_rows=num_image_rows,
        num_storm_image_columns=num_image_columns,
        rotate_grids=rotate_grids,
        rotated_grid_spacing_metres=rotated_grid_spacing_metres,
        radar_field_names=radar_field_names,
        reflectivity_heights_m_agl=refl_heights_m_agl)
    print(SEPARATOR_STRING)

    # Remove untarred MYRORSS files.
    myrorss_io.remove_unzipped_data_1day(
        spc_date_string=spc_date_string,
        top_directory_name=untarred_myrorss_dir_name,
        field_names=radar_field_names,
        refl_heights_m_asl=refl_heights_m_asl)
    print(SEPARATOR_STRING)
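
Azimuthal-shear fields live in a separate daily tar file, so the requested field list is partitioned with set intersection and difference. The same idiom in isolation (the field names here are illustrative, not the library's constants):

ALL_AZ_SHEAR_FIELD_NAMES = ['low_level_shear', 'mid_level_shear']
radar_field_names = ['reflectivity_dbz', 'low_level_shear', 'echo_top_km']

az_shear_field_names = list(
    set(radar_field_names) & set(ALL_AZ_SHEAR_FIELD_NAMES))
non_shear_field_names = list(
    set(radar_field_names) - set(ALL_AZ_SHEAR_FIELD_NAMES))

print(az_shear_field_names)           # ['low_level_shear']
print(sorted(non_shear_field_names))  # ['echo_top_km', 'reflectivity_dbz']
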
Example #12
def _extract_storm_images(
        num_image_rows, num_image_columns, rotate_grids,
        rotated_grid_spacing_metres, radar_field_names, refl_heights_m_agl,
        spc_date_string, first_time_string, last_time_string,
        tarred_myrorss_dir_name, untarred_myrorss_dir_name,
        top_tracking_dir_name, elevation_dir_name, tracking_scale_metres2,
        target_name, top_target_dir_name, top_output_dir_name):
    """Extracts storm-centered img for each field/height pair and storm object.

    :param num_image_rows: See documentation at top of file.
    :param num_image_columns: Same.
    :param rotate_grids: Same.
    :param rotated_grid_spacing_metres: Same.
    :param radar_field_names: Same.
    :param refl_heights_m_agl: Same.
    :param spc_date_string: Same.
    :param first_time_string: Same.
    :param last_time_string: Same.
    :param tarred_myrorss_dir_name: Same.
    :param untarred_myrorss_dir_name: Same.
    :param top_tracking_dir_name: Same.
    :param elevation_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param target_name: Same.
    :param top_target_dir_name: Same.
    :param top_output_dir_name: Same.
    :raises: ValueError: if `first_time_string` and `last_time_string` have
        different SPC dates.
    """

    if elevation_dir_name in ['', 'None']:
        elevation_dir_name = None

    if elevation_dir_name is None:
        host_name = socket.gethostname()

        if 'casper' in host_name:
            elevation_dir_name = '/glade/work/ryanlage/elevation'
        else:
            elevation_dir_name = '/condo/swatwork/ralager/elevation'

    if spc_date_string in ['', 'None']:
        first_time_unix_sec = time_conversion.string_to_unix_sec(
            first_time_string, TIME_FORMAT)
        last_time_unix_sec = time_conversion.string_to_unix_sec(
            last_time_string, TIME_FORMAT)

        first_spc_date_string = time_conversion.time_to_spc_date_string(
            first_time_unix_sec)
        last_spc_date_string = time_conversion.time_to_spc_date_string(
            last_time_unix_sec)

        if first_spc_date_string != last_spc_date_string:
            error_string = (
                'First ({0:s}) and last ({1:s}) times have different SPC dates.'
                '  This script can handle only one SPC date.'
            ).format(first_time_string, last_time_string)

            raise ValueError(error_string)

        spc_date_string = first_spc_date_string
    else:
        first_time_unix_sec = 0
        last_time_unix_sec = int(1e12)

    if tarred_myrorss_dir_name in ['', 'None']:
        tarred_myrorss_dir_name = None
    if target_name in ['', 'None']:
        target_name = None

    if target_name is not None:
        target_param_dict = target_val_utils.target_name_to_params(target_name)

        target_file_name = target_val_utils.find_target_file(
            top_directory_name=top_target_dir_name,
            event_type_string=target_param_dict[
                target_val_utils.EVENT_TYPE_KEY],
            spc_date_string=spc_date_string)

        print('Reading data from: "{0:s}"...'.format(target_file_name))
        target_dict = target_val_utils.read_target_values(
            netcdf_file_name=target_file_name, target_names=[target_name]
        )
        print('\n')

    refl_heights_m_asl = radar_utils.get_valid_heights(
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_utils.REFL_NAME)

    # Untar files.
    if tarred_myrorss_dir_name is not None:
        az_shear_field_names = list(
            set(radar_field_names) & set(ALL_AZ_SHEAR_FIELD_NAMES)
        )

        if len(az_shear_field_names) > 0:
            az_shear_tar_file_name = (
                '{0:s}/{1:s}/azimuthal_shear_only/{2:s}.tar'
            ).format(
                tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string
            )

            myrorss_io.unzip_1day_tar_file(
                tar_file_name=az_shear_tar_file_name,
                field_names=az_shear_field_names,
                spc_date_string=spc_date_string,
                top_target_directory_name=untarred_myrorss_dir_name)
            print(SEPARATOR_STRING)

        non_shear_field_names = list(
            set(radar_field_names) - set(ALL_AZ_SHEAR_FIELD_NAMES)
        )

        if len(non_shear_field_names) > 0:
            non_shear_tar_file_name = '{0:s}/{1:s}/{2:s}.tar'.format(
                tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string
            )

            myrorss_io.unzip_1day_tar_file(
                tar_file_name=non_shear_tar_file_name,
                field_names=non_shear_field_names,
                spc_date_string=spc_date_string,
                top_target_directory_name=untarred_myrorss_dir_name,
                refl_heights_m_asl=refl_heights_m_asl)
            print(SEPARATOR_STRING)

    # Read storm tracks for the given SPC date.
    tracking_file_names = tracking_io.find_files_one_spc_date(
        spc_date_string=spc_date_string,
        source_name=tracking_utils.SEGMOTION_NAME,
        top_tracking_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2
    )[0]

    file_times_unix_sec = numpy.array(
        [tracking_io.file_name_to_time(f) for f in tracking_file_names],
        dtype=int
    )

    good_indices = numpy.where(numpy.logical_and(
        file_times_unix_sec >= first_time_unix_sec,
        file_times_unix_sec <= last_time_unix_sec
    ))[0]

    tracking_file_names = [tracking_file_names[k] for k in good_indices]

    storm_object_table = tracking_io.read_many_files(
        tracking_file_names
    )[storm_images.STORM_COLUMNS_NEEDED]
    print(SEPARATOR_STRING)

    if target_name is not None:
        print((
            'Removing storm objects without target values (variable = '
            '"{0:s}")...'
        ).format(target_name))

        these_indices = tracking_utils.find_storm_objects(
            all_id_strings=storm_object_table[
                tracking_utils.FULL_ID_COLUMN].values.tolist(),
            all_times_unix_sec=storm_object_table[
                tracking_utils.VALID_TIME_COLUMN].values.astype(int),
            id_strings_to_keep=target_dict[target_val_utils.FULL_IDS_KEY],
            times_to_keep_unix_sec=target_dict[
                target_val_utils.VALID_TIMES_KEY],
            allow_missing=False)

        num_storm_objects_orig = len(storm_object_table.index)
        storm_object_table = storm_object_table.iloc[these_indices]
        num_storm_objects = len(storm_object_table.index)

        print('Removed {0:d} of {1:d} storm objects!\n'.format(
            num_storm_objects_orig - num_storm_objects, num_storm_objects_orig
        ))

    # Extract storm-centered radar images.
    storm_images.extract_storm_images_myrorss_or_mrms(
        storm_object_table=storm_object_table,
        radar_source=radar_utils.MYRORSS_SOURCE_ID,
        top_radar_dir_name=untarred_myrorss_dir_name,
        top_output_dir_name=top_output_dir_name,
        elevation_dir_name=elevation_dir_name,
        num_storm_image_rows=num_image_rows,
        num_storm_image_columns=num_image_columns, rotate_grids=rotate_grids,
        rotated_grid_spacing_metres=rotated_grid_spacing_metres,
        radar_field_names=radar_field_names,
        reflectivity_heights_m_agl=refl_heights_m_agl)
    print(SEPARATOR_STRING)

    # Remove untarred MYRORSS files.
    if tarred_myrorss_dir_name is not None:
        myrorss_io.remove_unzipped_data_1day(
            spc_date_string=spc_date_string,
            top_directory_name=untarred_myrorss_dir_name,
            field_names=radar_field_names,
            refl_heights_m_asl=refl_heights_m_asl)
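
# A minimal, self-contained sketch of the time-window filtering used above
# on the tracking files: parse a valid time from each file name, then keep
# only the files inside [first_time, last_time].  Here `parse_time_fn`
# stands in for tracking_io.file_name_to_time.
import numpy


def _filter_files_by_time(file_names, parse_time_fn, first_time_unix_sec,
                          last_time_unix_sec):
    """Returns only the files whose valid times fall in the window."""

    file_times_unix_sec = numpy.array(
        [parse_time_fn(f) for f in file_names], dtype=int
    )

    good_indices = numpy.where(numpy.logical_and(
        file_times_unix_sec >= first_time_unix_sec,
        file_times_unix_sec <= last_time_unix_sec
    ))[0]

    return [file_names[k] for k in good_indices]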
Example #13
def _run(input_prediction_file_name, top_tracking_dir_name,
         tracking_scale_metres2, x_spacing_metres, y_spacing_metres,
         effective_radius_metres, smoothing_method_name,
         smoothing_cutoff_radius_metres, smoothing_efold_radius_metres,
         top_output_dir_name):
    """Projects CNN forecasts onto the RAP grid.

    This is effectively the main method.

    :param input_prediction_file_name: See documentation at top of file.
    :param top_tracking_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param x_spacing_metres: Same.
    :param y_spacing_metres: Same.
    :param effective_radius_metres: Same.
    :param smoothing_method_name: Same.
    :param smoothing_cutoff_radius_metres: Same.
    :param smoothing_efold_radius_metres: Same.
    :param top_output_dir_name: Same.
    """

    print('Reading data from: "{0:s}"...'.format(input_prediction_file_name))
    ungridded_forecast_dict = prediction_io.read_ungridded_predictions(
        input_prediction_file_name)

    target_param_dict = target_val_utils.target_name_to_params(
        ungridded_forecast_dict[prediction_io.TARGET_NAME_KEY])

    min_buffer_dist_metres = target_param_dict[
        target_val_utils.MIN_LINKAGE_DISTANCE_KEY]

    # TODO(thunderhoser): This is HACKY.  A minimum buffer distance of zero
    # is re-encoded as NaN, which appears to be how
    # `gridded_forecasts._buffer_to_column_name` denotes a buffer with no
    # inner cutoff.
    if min_buffer_dist_metres == 0:
        min_buffer_dist_metres = numpy.nan

    max_buffer_dist_metres = target_param_dict[
        target_val_utils.MAX_LINKAGE_DISTANCE_KEY]

    min_lead_time_seconds = target_param_dict[
        target_val_utils.MIN_LEAD_TIME_KEY]

    max_lead_time_seconds = target_param_dict[
        target_val_utils.MAX_LEAD_TIME_KEY]

    forecast_column_name = gridded_forecasts._buffer_to_column_name(
        min_buffer_dist_metres=min_buffer_dist_metres,
        max_buffer_dist_metres=max_buffer_dist_metres,
        column_type=gridded_forecasts.FORECAST_COLUMN_TYPE)

    init_times_unix_sec = numpy.unique(
        ungridded_forecast_dict[prediction_io.STORM_TIMES_KEY])

    tracking_file_names = []

    for this_time_unix_sec in init_times_unix_sec:
        this_tracking_file_name = tracking_io.find_file(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=tracking_scale_metres2,
            source_name=tracking_utils.SEGMOTION_NAME,
            valid_time_unix_sec=this_time_unix_sec,
            spc_date_string=time_conversion.time_to_spc_date_string(
                this_time_unix_sec),
            raise_error_if_missing=True)

        tracking_file_names.append(this_tracking_file_name)

    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    # This call is used purely as a sanity check: with allow_missing=False,
    # find_storm_objects should raise an error if any forecast (storm ID,
    # time) pair lacks a matching storm object.  The returned indices are
    # deliberately discarded.
    tracking_utils.find_storm_objects(
        all_id_strings=ungridded_forecast_dict[prediction_io.STORM_IDS_KEY],
        all_times_unix_sec=ungridded_forecast_dict[
            prediction_io.STORM_TIMES_KEY],
        id_strings_to_keep=storm_object_table[
            tracking_utils.FULL_ID_COLUMN].values.tolist(),
        times_to_keep_unix_sec=storm_object_table[
            tracking_utils.VALID_TIME_COLUMN].values,
        allow_missing=False)

    sort_indices = tracking_utils.find_storm_objects(
        all_id_strings=storm_object_table[
            tracking_utils.FULL_ID_COLUMN].values.tolist(),
        all_times_unix_sec=storm_object_table[
            tracking_utils.VALID_TIME_COLUMN].values,
        id_strings_to_keep=ungridded_forecast_dict[
            prediction_io.STORM_IDS_KEY],
        times_to_keep_unix_sec=ungridded_forecast_dict[
            prediction_io.STORM_TIMES_KEY],
        allow_missing=False)

    # Column 1 of the probability matrix is taken as the positive-class
    # probability (assuming binary classification, with class 1 = event).
    forecast_probabilities = ungridded_forecast_dict[
        prediction_io.PROBABILITY_MATRIX_KEY][sort_indices, 1]

    storm_object_table = storm_object_table.assign(
        **{forecast_column_name: forecast_probabilities})

    gridded_forecast_dict = gridded_forecasts.create_forecast_grids(
        storm_object_table=storm_object_table,
        min_lead_time_sec=min_lead_time_seconds,
        max_lead_time_sec=max_lead_time_seconds,
        lead_time_resolution_sec=gridded_forecasts.
        DEFAULT_LEAD_TIME_RES_SECONDS,
        grid_spacing_x_metres=x_spacing_metres,
        grid_spacing_y_metres=y_spacing_metres,
        interp_to_latlng_grid=False,
        prob_radius_for_grid_metres=effective_radius_metres,
        smoothing_method=smoothing_method_name,
        smoothing_e_folding_radius_metres=smoothing_efold_radius_metres,
        smoothing_cutoff_radius_metres=smoothing_cutoff_radius_metres)

    print(SEPARATOR_STRING)

    output_file_name = prediction_io.find_file(
        top_prediction_dir_name=top_output_dir_name,
        first_init_time_unix_sec=numpy.min(
            storm_object_table[tracking_utils.VALID_TIME_COLUMN].values),
        last_init_time_unix_sec=numpy.max(
            storm_object_table[tracking_utils.VALID_TIME_COLUMN].values),
        gridded=True,
        raise_error_if_missing=False)

    print(('Writing results (forecast grids for {0:d} initial times) to: '
           '"{1:s}"...').format(
               len(gridded_forecast_dict[prediction_io.INIT_TIMES_KEY]),
               output_file_name))

    prediction_io.write_gridded_predictions(
        gridded_forecast_dict=gridded_forecast_dict,
        pickle_file_name=output_file_name)
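
# A hedged sketch of the (full ID, valid time) matching that
# tracking_utils.find_storm_objects performs above: for each pair to keep,
# return its row index in the full arrays.  This simplified version assumes
# every pair occurs exactly once; the real function also handles the
# allow_missing flag.
import numpy


def _match_storm_objects_sketch(all_id_strings, all_times_unix_sec,
                                id_strings_to_keep, times_to_keep_unix_sec):
    """Returns index of each (ID, time) pair to keep in the full arrays."""

    pair_to_index = {
        (this_id, this_time): i
        for i, (this_id, this_time)
        in enumerate(zip(all_id_strings, all_times_unix_sec))
    }

    return numpy.array([
        pair_to_index[(this_id, this_time)]
        for this_id, this_time
        in zip(id_strings_to_keep, times_to_keep_unix_sec)
    ], dtype=int)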
def _run(input_warning_file_name, top_tracking_dir_name, spc_date_string,
         max_distance_metres, min_lifetime_fraction, output_warning_file_name):
    """Links each NWS tornado warning to nearest storm.

    This is effectively the main method.

    :param input_warning_file_name: See documentation at top of file.
    :param top_tracking_dir_name: Same.
    :param spc_date_string: Same.
    :param max_distance_metres: Same.
    :param min_lifetime_fraction: Same.
    :param output_warning_file_name: Same.
    """

    error_checking.assert_is_greater(max_distance_metres, 0.)
    error_checking.assert_is_greater(min_lifetime_fraction, 0.)
    error_checking.assert_is_leq(min_lifetime_fraction, 1.)

    print('Reading warnings from: "{0:s}"...'.format(input_warning_file_name))
    with open(input_warning_file_name, 'rb') as this_file_handle:
        warning_table = pickle.load(this_file_handle)

    date_start_time_unix_sec = (
        time_conversion.get_start_of_spc_date(spc_date_string))
    date_end_time_unix_sec = (
        time_conversion.get_end_of_spc_date(spc_date_string))
    warning_table = warning_table.loc[
        (warning_table[WARNING_START_TIME_KEY] >= date_start_time_unix_sec)
        & (warning_table[WARNING_START_TIME_KEY] <= date_end_time_unix_sec)]
    num_warnings = len(warning_table.index)

    print('Number of warnings beginning on SPC date "{0:s}" = {1:d}'.format(
        spc_date_string, num_warnings))

    warning_polygon_objects_xy = [None] * num_warnings

    # Duplicating one column and calling .tolist() yields a list of lists,
    # which pandas will accept as a new column; each entry is then replaced
    # with an empty list in the loop below.
    nested_array = warning_table[[
        WARNING_START_TIME_KEY, WARNING_START_TIME_KEY
    ]].values.tolist()

    warning_table = warning_table.assign(
        **{
            WARNING_XY_POLYGON_KEY: warning_polygon_objects_xy,
            LINKED_SECONDARY_IDS_KEY: nested_array
        })

    for k in range(num_warnings):
        warning_table[LINKED_SECONDARY_IDS_KEY].values[k] = []

        this_object_latlng = (
            warning_table[WARNING_LATLNG_POLYGON_KEY].values[k]
        )

        warning_table[WARNING_XY_POLYGON_KEY].values[k], _ = (
            polygons.project_latlng_to_xy(
                polygon_object_latlng=this_object_latlng,
                projection_object=PROJECTION_OBJECT))

    tracking_file_names = []

    for i in [-1, 0, 1]:
        this_spc_date_string = time_conversion.time_to_spc_date_string(
            date_start_time_unix_sec + i * NUM_SECONDS_PER_DAY)
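        # Files are gathered for the neighbouring SPC dates as well
        # (i = -1 and +1), presumably so that warnings near the date
        # boundary can still be linked to storms on adjacent dates.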

        tracking_file_names += tracking_io.find_files_one_spc_date(
            top_tracking_dir_name=top_tracking_dir_name,
            tracking_scale_metres2=DUMMY_TRACKING_SCALE_METRES2,
            source_name=tracking_utils.SEGMOTION_NAME,
            spc_date_string=this_spc_date_string,
            raise_error_if_missing=False)[0]

    if len(tracking_file_names) == 0:
        _write_linked_warnings(warning_table=warning_table,
                               output_file_name=output_warning_file_name)

        return

    print(SEPARATOR_STRING)
    storm_object_table = tracking_io.read_many_files(tracking_file_names)
    print(SEPARATOR_STRING)

    if len(storm_object_table.index) == 0:
        _write_linked_warnings(warning_table=warning_table,
                               output_file_name=output_warning_file_name)

        return

    storm_object_table = linkage._project_storms_latlng_to_xy(
        storm_object_table=storm_object_table,
        projection_object=PROJECTION_OBJECT)

    for k in range(num_warnings):
        this_start_time_string = time_conversion.unix_sec_to_string(
            warning_table[WARNING_START_TIME_KEY].values[k],
            LOG_MESSAGE_TIME_FORMAT)

        this_end_time_string = time_conversion.unix_sec_to_string(
            warning_table[WARNING_END_TIME_KEY].values[k],
            LOG_MESSAGE_TIME_FORMAT)

        print('Attempting to link warning from {0:s} to {1:s}...'.format(
            this_start_time_string, this_end_time_string))

        warning_table[LINKED_SECONDARY_IDS_KEY].values[k] = _link_one_warning(
            warning_table=warning_table.iloc[[k]],
            storm_object_table=copy.deepcopy(storm_object_table),
            max_distance_metres=max_distance_metres,
            min_lifetime_fraction=min_lifetime_fraction)

        print('\n')

    _write_linked_warnings(warning_table=warning_table,
                           output_file_name=output_warning_file_name)
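
# A hedged, self-contained sketch of the lat-long-to-x-y projection step
# (polygons.project_latlng_to_xy with PROJECTION_OBJECT above), written
# directly with pyproj and shapely.  The CRS here is a placeholder, not the
# real projection used by the script.
import pyproj
from shapely.geometry import Polygon
from shapely.ops import transform


def _project_polygon_sketch(polygon_object_latlng):
    """Projects one polygon from (longitude, latitude) to (x, y) in metres."""

    transformer = pyproj.Transformer.from_crs(
        'EPSG:4326', 'EPSG:2163', always_xy=True  # placeholder CRS
    )
    return transform(transformer.transform, polygon_object_latlng)


# Hypothetical usage:
#     polygon_xy = _project_polygon_sketch(
#         Polygon([(-97.5, 35.2), (-97.4, 35.2), (-97.4, 35.3)]))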
Example #15
def _shuffle_io_for_renaming(input_file_names_by_date,
                             output_file_names_by_date,
                             valid_times_by_date_unix_sec,
                             storm_object_table_by_date, working_date_index):
    """Shuffles data into and out of memory for renaming storms.

    For more on renaming storms, see doc for `rename_storms`.

    N = number of dates

    :param input_file_names_by_date: length-N list created by
        `_find_io_files_for_renaming`.
    :param output_file_names_by_date: Same.
    :param valid_times_by_date_unix_sec: Same.
    :param storm_object_table_by_date: length-N list, where the [i]th element is
        a pandas DataFrame with tracking data for the [i]th date.  At any given
        time, all but 3 items should be None.  Each table has columns documented
        in `storm_tracking_io.write_file`, plus the following column.
    storm_object_table_by_date.date_index: Array index.  If date_index[i] = j,
        the [i]th row (storm object) comes from the [j]th date.
    :param working_date_index: Index of date currently being processed.  Only
        dates (working_date_index - 1)...(working_date_index + 1) need to be in
        memory.  If None, this method will write/clear all data currently in
        memory, without reading new data.
    :return: storm_object_table_by_date: Same as input, except that different
        items are filled and different items are None.
    """

    num_dates = len(input_file_names_by_date)

    if working_date_index is None:
        # Sentinel index (-1) matches no real date, so every table still in
        # memory is written out and cleared in the loop below.
        date_needed_indices = numpy.array([-1], dtype=int)
    else:
        date_needed_indices = _get_dates_needed_for_renaming_storms(
            working_date_index=working_date_index,
            num_dates_in_period=num_dates)

    for j in range(num_dates):
        if j in date_needed_indices and storm_object_table_by_date[j] is None:
            storm_object_table_by_date[j] = tracking_io.read_many_files(
                input_file_names_by_date[j].tolist())
            print(SEPARATOR_STRING)

            this_num_storm_objects = len(storm_object_table_by_date[j].index)
            these_date_indices = numpy.full(
                this_num_storm_objects, j, dtype=int)

            storm_object_table_by_date[j] = (
                storm_object_table_by_date[j].assign(
                    **{DATE_INDEX_KEY: these_date_indices}))

        if j not in date_needed_indices:
            if storm_object_table_by_date[j] is not None:
                these_output_file_names = output_file_names_by_date[j]
                these_valid_times_unix_sec = valid_times_by_date_unix_sec[j]

                for k in range(len(these_valid_times_unix_sec)):
                    print('Writing new data to "{0:s}"...'.format(
                        these_output_file_names[k]))

                    these_indices = numpy.where(
                        storm_object_table_by_date[j][
                            tracking_utils.VALID_TIME_COLUMN].values ==
                        these_valid_times_unix_sec[k])[0]

                    tracking_io.write_file(
                        storm_object_table=storm_object_table_by_date[j].
                        iloc[these_indices],
                        pickle_file_name=these_output_file_names[k])

                print(SEPARATOR_STRING)

            storm_object_table_by_date[j] = None

    if working_date_index is None:
        return storm_object_table_by_date

    # Align columns of each newly read table with those of the previous
    # date's table, so that all tables share the same column set.
    for j in date_needed_indices[1:]:
        storm_object_table_by_date[j], _ = storm_object_table_by_date[j].align(
            storm_object_table_by_date[j - 1], axis=1)

    return storm_object_table_by_date
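
# A hedged sketch of what _get_dates_needed_for_renaming_storms is assumed
# to return, based on the docstring above: the working date plus its
# immediate neighbours, clipped to the valid range of date indices.
import numpy


def _get_dates_needed_sketch(working_date_index, num_dates_in_period):
    """Returns indices of dates that must be in memory."""

    date_needed_indices = numpy.array(
        [working_date_index - 1, working_date_index, working_date_index + 1],
        dtype=int
    )

    good_flags = numpy.logical_and(
        date_needed_indices >= 0,
        date_needed_indices < num_dates_in_period
    )
    return date_needed_indices[good_flags]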