Example #1
def _compute_shape_stats(spc_date_string, top_tracking_dir_name,
                         tracking_scale_metres2, output_dir_name):
    """Computes shape statistics for each storm object.

    :param spc_date_string: SPC (Storm Prediction Center) date in format
        "yyyymmdd".  Shape statistics will be computed for all storm objects on
        this date.
    :param top_tracking_dir_name: Name of top-level directory with storm-
        tracking data.
    :param tracking_scale_metres2: Tracking scale (minimum storm area).  Will be
        used to find input data.
    :param output_dir_name: Name of output directory.  A single Pickle file,
        with shape statistics for each storm object, will be written here.
    """

    tracking_file_names, _ = tracking_io.find_processed_files_one_spc_date(
        spc_date_string=spc_date_string,
        data_source=tracking_utils.SEGMOTION_SOURCE_ID,
        top_processed_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)

    storm_object_table = tracking_io.read_many_processed_files(
        tracking_file_names)
    print SEPARATOR_STRING

    shape_statistic_table = shape_stats.get_stats_for_storm_objects(
        storm_object_table)
    print SEPARATOR_STRING

    shape_statistic_file_name = '{0:s}/shape_statistics_{1:s}.p'.format(
        output_dir_name, spc_date_string)
    print 'Writing shape statistics to: "{0:s}"...'.format(
        shape_statistic_file_name)
    shape_stats.write_stats_for_storm_objects(shape_statistic_table,
                                              shape_statistic_file_name)
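
# Usage sketch for _compute_shape_stats.  The SPC date, directory names, and
# tracking scale below are hypothetical placeholders, not values taken from
# the original script.
_compute_shape_stats(
    spc_date_string='20110520',
    top_tracking_dir_name='/data/storm_tracking',
    tracking_scale_metres2=314159265,
    output_dir_name='/data/shape_stats')
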
def _compute_radar_stats_from_gridrad(spc_date_string, top_tracking_dir_name,
                                      tracking_scale_metres2,
                                      top_gridrad_dir_name, output_dir_name):
    """Uses GridRad data to compute radar statistics for each storm object.

    :param spc_date_string: SPC (Storm Prediction Center) date in format
        "yyyymmdd".  Radar stats will be computed for all storm objects on this
        date.
    :param top_tracking_dir_name: Name of top-level directory with storm-
        tracking files.  Storm objects will be read from here.
    :param tracking_scale_metres2: Tracking scale (minimum storm area).  Will be
        used to find tracking files.
    :param top_gridrad_dir_name: Name of top-level directory with GridRad files.
    :param output_dir_name: Name of output directory.  A single Pickle file,
        with radar stats for each storm object, will be written here.
    """

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    tracking_file_names, _ = tracking_io.find_processed_files_one_spc_date(
        spc_date_string=spc_date_string,
        data_source=tracking_utils.SEGMOTION_SOURCE_ID,
        top_processed_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)

    storm_object_table = tracking_io.read_many_processed_files(
        tracking_file_names)
    print SEPARATOR_STRING

    storm_object_statistic_table = (
        radar_statistics.get_storm_based_radar_stats_gridrad(
            storm_object_table=storm_object_table,
            top_radar_dir_name=top_gridrad_dir_name))
    print SEPARATOR_STRING

    output_file_name = '{0:s}/radar_stats_for_storm_objects_{1:s}.p'.format(
        output_dir_name, spc_date_string)
    print 'Writing radar statistics to file: "{0:s}"...'.format(
        output_file_name)
    radar_statistics.write_stats_for_storm_objects(
        storm_object_statistic_table, output_file_name)

def get_storm_object_table(num_spc_dates, climatology_type, working_date_index):
    """Returns table of storm objects for all dates currently in memory."""

    date_in_memory_indices = _get_dates_needed(
        working_date_index, num_spc_dates, climatology_type)

    for i in range(num_spc_dates):
        if i in date_in_memory_indices:
            if storm_object_table_by_spc_date[i] is None:

                # Find and read tracking files for [i]th date.
                these_tracking_file_names, _ = (
                    tracking_io.find_processed_files_one_spc_date(
                        spc_date_string=spc_date_strings[i],
                        data_source='segmotion',
                        top_processed_dir_name=TOP_PROCESSED_DIR_NAME,
                        tracking_scale_metres2=TRACKING_SCALE_METRES2))

                storm_object_table_by_spc_date[i] = (
                    tracking_io.read_many_processed_files(
                        these_tracking_file_names))

        else:
            print 'Clearing data for SPC date "{0:s}"...'.format(
                spc_date_strings[i])
            storm_object_table_by_spc_date[i] = None

    # Align tables so that all dates in memory have the same columns, in the
    # same order, then concatenate them into one multiday table.
    for j in date_in_memory_indices[1:]:
        storm_object_table_by_spc_date[j], _ = (
            storm_object_table_by_spc_date[j].align(
                storm_object_table_by_spc_date[date_in_memory_indices[0]],
                axis=1))

    storm_object_tables_to_concat = [
        storm_object_table_by_spc_date[j] for j in date_in_memory_indices]
    multiday_storm_object_table = pandas.concat(
        storm_object_tables_to_concat, axis=0, ignore_index=True)

    # Keep only storm objects with age >= 900 seconds (15 minutes).
    multiday_storm_object_table = multiday_storm_object_table[
        multiday_storm_object_table['age_sec'] >= 900]

    return multiday_storm_object_table
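
# Standalone illustration (toy data) of the align-then-concatenate pattern
# used above: `DataFrame.align(..., axis=1)` gives both tables the same
# column set (NaN-filled where a column was missing), so `pandas.concat` can
# stack their rows cleanly.  The column names here are invented.
table_a = pandas.DataFrame({'storm_id': ['a'], 'age_sec': [600]})
table_b = pandas.DataFrame({'storm_id': ['b'], 'age_sec': [1200],
                            'speed_m_s01': [12.5]})
table_a, table_b = table_a.align(table_b, axis=1)
combined_table = pandas.concat([table_a, table_b], axis=0, ignore_index=True)
# combined_table now has columns {age_sec, speed_m_s01, storm_id}, with NaN
# for speed_m_s01 in the first row, since table_a lacked that column.
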
Example #4
        for i in range(num_spc_dates):
            if i in date_in_memory_indices:
                if storm_object_table_by_spc_date[i] is None:

                    # Find tracking files for [i]th date.
                    these_tracking_file_names, _ = (
                        tracking_io.find_processed_files_one_spc_date(
                            spc_date_string=spc_date_strings[i],
                            data_source='segmotion',
                            top_processed_dir_name=TOP_PROCESSED_DIR_NAME,
                            tracking_scale_metres2=TRACKING_SCALE_METRES2))

                    # Read tracking files for [i]th date.
                    storm_object_table_by_spc_date[i] = (
                        tracking_io.read_many_processed_files(
                            these_tracking_file_names))

            else:
                print 'Clearing data for SPC date "{0:s}"...'.format(
                    spc_date_strings[i])
                storm_object_table_by_spc_date[i] = None

        print SEPARATOR_STRING

        for j in date_in_memory_indices[1:]:
            storm_object_table_by_spc_date[j], _ = (
                storm_object_table_by_spc_date[j].align(
                    storm_object_table_by_spc_date[date_in_memory_indices[0]],
                    axis=1))

        storm_object_tables_to_concat = [
            storm_object_table_by_spc_date[j] for j in date_in_memory_indices]
        multiday_storm_object_table = pandas.concat(
            storm_object_tables_to_concat, axis=0, ignore_index=True)
Example #5
def _extract_storm_images(num_image_rows, num_image_columns, rotate_grids,
                          rotated_grid_spacing_metres, radar_field_names,
                          radar_heights_m_agl, spc_date_string,
                          top_radar_dir_name, top_tracking_dir_name,
                          tracking_scale_metres2, target_name,
                          top_target_dir_name, top_output_dir_name):
    """Extracts storm-centered radar images from GridRad data.

    :param num_image_rows: See documentation at top of file.
    :param num_image_columns: Same.
    :param rotate_grids: Same.
    :param rotated_grid_spacing_metres: Same.
    :param radar_field_names: Same.
    :param radar_heights_m_agl: Same.
    :param spc_date_string: Same.
    :param top_radar_dir_name: Same.
    :param top_tracking_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param target_name: Same.
    :param top_target_dir_name: Same.
    :param top_output_dir_name: Same.
    """

    if target_name in ['', 'None']:
        target_name = None

    if target_name is not None:
        target_param_dict = target_val_utils.target_name_to_params(target_name)
        target_file_name = target_val_utils.find_target_file(
            top_directory_name=top_target_dir_name,
            event_type_string=target_param_dict[
                target_val_utils.EVENT_TYPE_KEY],
            spc_date_string=spc_date_string)

        print 'Reading data from: "{0:s}"...'.format(target_file_name)
        target_dict = target_val_utils.read_target_values(
            netcdf_file_name=target_file_name, target_name=target_name)
        print '\n'

    # Find storm objects on the given SPC date.
    tracking_file_names = tracking_io.find_processed_files_one_spc_date(
        spc_date_string=spc_date_string,
        data_source=tracking_utils.SEGMOTION_SOURCE_ID,
        top_processed_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    # Read storm objects on the given SPC date.
    storm_object_table = tracking_io.read_many_processed_files(
        tracking_file_names)[storm_images.STORM_COLUMNS_NEEDED]
    print SEPARATOR_STRING

    if target_name is not None:
        print(
            'Removing storm objects without target values (variable = '
            '"{0:s}")...').format(target_name)

        these_indices = tracking_utils.find_storm_objects(
            all_storm_ids=storm_object_table[
                tracking_utils.STORM_ID_COLUMN].values.tolist(),
            all_times_unix_sec=storm_object_table[
                tracking_utils.TIME_COLUMN].values.astype(int),
            storm_ids_to_keep=target_dict[target_val_utils.STORM_IDS_KEY],
            times_to_keep_unix_sec=target_dict[
                target_val_utils.VALID_TIMES_KEY],
            allow_missing=False)

        num_storm_objects_orig = len(storm_object_table.index)
        storm_object_table = storm_object_table.iloc[these_indices]
        num_storm_objects = len(storm_object_table.index)

        print 'Removed {0:d} of {1:d} storm objects!\n'.format(
            num_storm_objects_orig - num_storm_objects, num_storm_objects_orig)

    # Extract storm-centered radar images.
    storm_images.extract_storm_images_gridrad(
        storm_object_table=storm_object_table,
        top_radar_dir_name=top_radar_dir_name,
        top_output_dir_name=top_output_dir_name,
        num_storm_image_rows=num_image_rows,
        num_storm_image_columns=num_image_columns,
        rotate_grids=rotate_grids,
        rotated_grid_spacing_metres=rotated_grid_spacing_metres,
        radar_field_names=radar_field_names,
        radar_heights_m_agl=radar_heights_m_agl)
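
# Standalone illustration (toy data) of the kind of ID/time matching done by
# `tracking_utils.find_storm_objects` above: a row survives only if its
# (storm ID, valid time) pair also appears in the target dictionary.  This is
# just the idea, not the library's implementation.
all_id_time_pairs = list(zip(['a', 'a', 'b'], [0, 300, 0]))
pairs_to_keep = set(zip(['a', 'b'], [300, 0]))
good_indices = numpy.array(
    [i for i, p in enumerate(all_id_time_pairs) if p in pairs_to_keep],
    dtype=int)
# good_indices is now array([1, 2]): rows ('a', 300) and ('b', 0) survive.
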
def _shuffle_io_for_renaming(input_file_names_by_date,
                             output_file_names_by_date,
                             valid_times_by_date_unix_sec,
                             storm_object_table_by_date, working_date_index):
    """Shuffles data into and out of memory for renaming storms.

    For more on renaming storms, see doc for `rename_storms`.

    N = number of dates

    :param input_file_names_by_date: length-N list created by
        `_find_io_files_for_renaming`.
    :param output_file_names_by_date: Same.
    :param valid_times_by_date_unix_sec: Same.
    :param storm_object_table_by_date: length-N list, where the [i]th element is
        a pandas DataFrame with tracking data for the [i]th date.  At any given
        time, all but 3 items should be None.  Each table has columns documented
        in `storm_tracking_io.write_processed_file`, plus the following column.
    storm_object_table_by_date.date_index: Array index.  If date_index[i] = j,
        the [i]th row (storm object) comes from the [j]th date.
    :param working_date_index: Index of date currently being processed.  Only
        dates (working_date_index - 1)...(working_date_index + 1) need to be in
        memory.  If None, this method will write/clear all data currently in
        memory, without reading new data.
    :return: storm_object_table_by_date: Same as input, except that different
        items are filled and different items are None.
    """

    num_dates = len(input_file_names_by_date)

    if working_date_index is None:
        date_needed_indices = numpy.array([-1], dtype=int)
    else:
        date_needed_indices = _get_dates_needed_for_renaming_storms(
            working_date_index=working_date_index,
            num_dates_in_period=num_dates)

    for j in range(num_dates):
        if j in date_needed_indices and storm_object_table_by_date[j] is None:
            storm_object_table_by_date[j] = (
                tracking_io.read_many_processed_files(
                    input_file_names_by_date[j].tolist()))
            print SEPARATOR_STRING

            this_num_storm_objects = len(storm_object_table_by_date[j].index)
            these_date_indices = numpy.full(
                this_num_storm_objects, j, dtype=int)
            argument_dict = {DATE_INDEX_KEY: these_date_indices}
            storm_object_table_by_date[j] = storm_object_table_by_date[
                j].assign(**argument_dict)

        if j not in date_needed_indices:
            if storm_object_table_by_date[j] is not None:
                these_output_file_names = output_file_names_by_date[j]
                these_valid_times_unix_sec = valid_times_by_date_unix_sec[j]

                for k in range(len(these_valid_times_unix_sec)):
                    print 'Writing new data to "{0:s}"...'.format(
                        these_output_file_names[k])

                    these_indices = numpy.where(
                        storm_object_table_by_date[j][
                            tracking_utils.TIME_COLUMN].values ==
                        these_valid_times_unix_sec[k])[0]
                    tracking_io.write_processed_file(
                        storm_object_table=storm_object_table_by_date[j].
                        iloc[these_indices],
                        pickle_file_name=these_output_file_names[k])

                print SEPARATOR_STRING

            storm_object_table_by_date[j] = None

    if working_date_index is None:
        return storm_object_table_by_date

    for j in date_needed_indices[1:]:
        storm_object_table_by_date[j], _ = storm_object_table_by_date[j].align(
            storm_object_table_by_date[j - 1], axis=1)

    return storm_object_table_by_date
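
# `_get_dates_needed_for_renaming_storms` is not shown in this excerpt.  Per
# the docstring above, only dates (working_date_index - 1)...
# (working_date_index + 1) need to be in memory, so a minimal sketch (an
# assumption consistent with that contract, not the original code) is:
def _get_dates_needed_for_renaming_storms(working_date_index,
                                          num_dates_in_period):
    """Returns array indices of dates needed in memory."""

    first_index = max([working_date_index - 1, 0])
    last_index = min([working_date_index + 1, num_dates_in_period - 1])
    return numpy.arange(first_index, last_index + 1, dtype=int)
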
    for working_date_index in range(num_spc_dates):
        date_in_memory_indices = utils._get_dates_needed(
            working_date_index=working_date_index, num_dates=num_spc_dates,
            climatology_type=PASSAGE_CLIMATOLOGY_TYPE)

        for i in range(num_spc_dates):
            if i in date_in_memory_indices:
                if storm_object_table_by_spc_date[i] is None:

                    # Find tracking files for [i]th date.
                    these_tracking_file_names, _ = (
                        tracking_io.find_processed_files_one_spc_date(
                            spc_date_string=spc_date_strings[i],
                            data_source='segmotion',
                            top_processed_dir_name=TOP_PROCESSED_DIR_NAME,
                            tracking_scale_metres2=TRACKING_SCALE_METRES2))

                    # Read tracking files for [i]th date.
                    storm_object_table_by_spc_date[i] = (
                        tracking_io.read_many_processed_files(
                            these_tracking_file_names))

            else:
                print 'Clearing data for SPC date "{0:s}"...'.format(
                    spc_date_strings[i])
                storm_object_table_by_spc_date[i] = None

        # Align tables (same columns in same order), concatenate them, and
        # project storm centroids from lat-long to x-y coordinates.
        for j in date_in_memory_indices[1:]:
            storm_object_table_by_spc_date[j], _ = (
                storm_object_table_by_spc_date[j].align(
                    storm_object_table_by_spc_date[date_in_memory_indices[0]],
                    axis=1))

        storm_object_tables_to_concat = [
            storm_object_table_by_spc_date[j] for j in date_in_memory_indices]
        multiday_storm_object_table = pandas.concat(
            storm_object_tables_to_concat, axis=0, ignore_index=True)

        centroids_x_metres, centroids_y_metres = (
            projections.project_latlng_to_xy(
                latitudes_deg=multiday_storm_object_table[
                    tracking_utils.CENTROID_LAT_COLUMN].values,
                longitudes_deg=multiday_storm_object_table[
                    tracking_utils.CENTROID_LNG_COLUMN].values,
                projection_object=projection_object))
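
# `projection_object` is created elsewhere in the original script.  One way
# to build it, assuming the `projections` module offers an
# azimuthal-equidistant initializer (the function name and arguments here are
# assumptions, not taken from the source):
projection_object = projections.init_azimuthal_equidistant_projection(
    central_latitude_deg=35., central_longitude_deg=-95.)
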
def _interp_soundings(spc_date_string, lead_times_seconds,
                      lag_time_for_convective_contamination_sec,
                      top_ruc_directory_name, top_rap_directory_name,
                      top_tracking_dir_name, tracking_scale_metres2,
                      top_output_dir_name):
    """Interpolates NWP sounding to each storm object at each lead time.

    :param spc_date_string: See documentation at top of file.
    :param lead_times_seconds: Same.
    :param lag_time_for_convective_contamination_sec: Same.
    :param top_ruc_directory_name: Same.
    :param top_rap_directory_name: Same.
    :param top_tracking_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param top_output_dir_name: Same.
    :raises: ValueError: if model-initialization times needed are on opposite
        sides of 0000 UTC 1 May 2012 (the cutoff between RUC and RAP models).
    """

    lead_times_seconds = numpy.array(lead_times_seconds, dtype=int)

    tracking_file_names, _ = tracking_io.find_processed_files_one_spc_date(
        spc_date_string=spc_date_string,
        data_source=tracking_utils.SEGMOTION_SOURCE_ID,
        top_processed_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)

    storm_object_table = tracking_io.read_many_processed_files(
        tracking_file_names)
    print SEPARATOR_STRING

    first_storm_time_unix_sec = numpy.min(
        storm_object_table[tracking_utils.TIME_COLUMN].values)
    last_storm_time_unix_sec = numpy.max(
        storm_object_table[tracking_utils.TIME_COLUMN].values)

    first_init_time_unix_sec = number_rounding.floor_to_nearest(
        (first_storm_time_unix_sec + numpy.min(lead_times_seconds) -
         lag_time_for_convective_contamination_sec), HOURS_TO_SECONDS)
    last_init_time_unix_sec = number_rounding.floor_to_nearest(
        (last_storm_time_unix_sec + numpy.max(lead_times_seconds) -
         lag_time_for_convective_contamination_sec), HOURS_TO_SECONDS)

    extreme_init_times_unix_sec = numpy.array(
        [first_init_time_unix_sec, last_init_time_unix_sec], dtype=int)

    if numpy.all(extreme_init_times_unix_sec < FIRST_RAP_TIME_UNIX_SEC):
        top_grib_directory_name = top_ruc_directory_name
        model_name = nwp_model_utils.RUC_MODEL_NAME
    elif numpy.all(extreme_init_times_unix_sec >= FIRST_RAP_TIME_UNIX_SEC):
        top_grib_directory_name = top_rap_directory_name
        model_name = nwp_model_utils.RAP_MODEL_NAME
    else:
        first_storm_time_string = time_conversion.unix_sec_to_string(
            first_storm_time_unix_sec, STORM_TIME_FORMAT)
        last_storm_time_string = time_conversion.unix_sec_to_string(
            last_storm_time_unix_sec, STORM_TIME_FORMAT)
        first_init_time_string = time_conversion.unix_sec_to_string(
            first_init_time_unix_sec, MODEL_INIT_TIME_FORMAT)
        last_init_time_string = time_conversion.unix_sec_to_string(
            last_init_time_unix_sec, MODEL_INIT_TIME_FORMAT)

        error_string = (
            'First and last storm times are {0:s} and {1:s}.  Thus, first and '
            'last model-initialization times needed are {2:s} and {3:s}, which '
            'are on opposite sides of {4:s} (the cutoff between RUC and RAP '
            'models).  The code is not generalized enough to interp data from '
            'two different models.  Sorry, eh?').format(
                first_storm_time_string, last_storm_time_string,
                first_init_time_string, last_init_time_string,
                FIRST_RAP_TIME_STRING)
        raise ValueError(error_string)

    sounding_dict_by_lead_time = soundings.interp_soundings_to_storm_objects(
        storm_object_table=storm_object_table,
        top_grib_directory_name=top_grib_directory_name,
        model_name=model_name,
        use_all_grids=True,
        height_levels_m_agl=soundings.DEFAULT_HEIGHT_LEVELS_M_AGL,
        lead_times_seconds=lead_times_seconds,
        lag_time_for_convective_contamination_sec=
        lag_time_for_convective_contamination_sec,
        wgrib_exe_name=WGRIB_EXE_NAME,
        wgrib2_exe_name=WGRIB2_EXE_NAME,
        raise_error_if_missing=False)
    print SEPARATOR_STRING

    num_lead_times = len(lead_times_seconds)
    for k in range(num_lead_times):
        this_sounding_file_name = soundings.find_sounding_file(
            top_directory_name=top_output_dir_name,
            spc_date_string=spc_date_string,
            lead_time_seconds=lead_times_seconds[k],
            lag_time_for_convective_contamination_sec=
            lag_time_for_convective_contamination_sec,
            raise_error_if_missing=False)

        print 'Writing soundings to: "{0:s}"...'.format(
            this_sounding_file_name)
        soundings.write_soundings(
            netcdf_file_name=this_sounding_file_name,
            sounding_dict_height_coords=sounding_dict_by_lead_time[k],
            lead_time_seconds=lead_times_seconds[k],
            lag_time_for_convective_contamination_sec=
            lag_time_for_convective_contamination_sec)
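
# The module-level constants used in _interp_soundings are not shown in this
# excerpt.  A sketch of plausible definitions for the RUC/RAP cutoff
# (0000 UTC 1 May 2012, per the docstring); the exact time formats are
# assumptions:
MODEL_INIT_TIME_FORMAT = '%Y-%m-%d-%H'
STORM_TIME_FORMAT = '%Y-%m-%d-%H%M%S'
FIRST_RAP_TIME_STRING = '2012-05-01-00'
FIRST_RAP_TIME_UNIX_SEC = time_conversion.string_to_unix_sec(
    FIRST_RAP_TIME_STRING, MODEL_INIT_TIME_FORMAT)
HOURS_TO_SECONDS = 3600
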
def _extract_storm_images(num_image_rows, num_image_columns, rotate_grids,
                          rotated_grid_spacing_metres, radar_field_names,
                          refl_heights_m_agl, spc_date_string,
                          tarred_myrorss_dir_name, untarred_myrorss_dir_name,
                          top_tracking_dir_name, tracking_scale_metres2,
                          target_name, top_target_dir_name,
                          top_output_dir_name):
    """Extracts storm-centered img for each field/height pair and storm object.

    :param num_image_rows: See documentation at top of file.
    :param num_image_columns: Same.
    :param rotate_grids: Same.
    :param rotated_grid_spacing_metres: Same.
    :param radar_field_names: Same.
    :param refl_heights_m_agl: Same.
    :param spc_date_string: Same.
    :param tarred_myrorss_dir_name: Same.
    :param untarred_myrorss_dir_name: Same.
    :param top_tracking_dir_name: Same.
    :param tracking_scale_metres2: Same.
    :param target_name: Same.
    :param top_target_dir_name: Same.
    :param top_output_dir_name: Same.
    """

    if target_name in ['', 'None']:
        target_name = None

    if target_name is not None:
        target_param_dict = target_val_utils.target_name_to_params(target_name)
        target_file_name = target_val_utils.find_target_file(
            top_directory_name=top_target_dir_name,
            event_type_string=target_param_dict[
                target_val_utils.EVENT_TYPE_KEY],
            spc_date_string=spc_date_string)

        print 'Reading data from: "{0:s}"...'.format(target_file_name)
        target_dict = target_val_utils.read_target_values(
            netcdf_file_name=target_file_name, target_name=target_name)
        print '\n'

    refl_heights_m_asl = radar_utils.get_valid_heights(
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_utils.REFL_NAME)

    # Untar files with azimuthal shear.
    az_shear_field_names = list(
        set(radar_field_names) & set(ALL_AZ_SHEAR_FIELD_NAMES))

    if len(az_shear_field_names):
        az_shear_tar_file_name = (
            '{0:s}/{1:s}/azimuthal_shear_only/{2:s}.tar'.format(
                tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string))

        myrorss_io.unzip_1day_tar_file(
            tar_file_name=az_shear_tar_file_name,
            field_names=az_shear_field_names,
            spc_date_string=spc_date_string,
            top_target_directory_name=untarred_myrorss_dir_name)
        print SEPARATOR_STRING

    # Untar files with other radar fields.
    non_shear_field_names = list(
        set(radar_field_names) - set(ALL_AZ_SHEAR_FIELD_NAMES))

    if len(non_shear_field_names):
        non_shear_tar_file_name = '{0:s}/{1:s}/{2:s}.tar'.format(
            tarred_myrorss_dir_name, spc_date_string[:4], spc_date_string)

        myrorss_io.unzip_1day_tar_file(
            tar_file_name=non_shear_tar_file_name,
            field_names=non_shear_field_names,
            spc_date_string=spc_date_string,
            top_target_directory_name=untarred_myrorss_dir_name,
            refl_heights_m_asl=refl_heights_m_asl)
        print SEPARATOR_STRING

    # Read storm tracks for the given SPC date.
    tracking_file_names = tracking_io.find_processed_files_one_spc_date(
        spc_date_string=spc_date_string,
        data_source=tracking_utils.SEGMOTION_SOURCE_ID,
        top_processed_dir_name=top_tracking_dir_name,
        tracking_scale_metres2=tracking_scale_metres2)[0]

    storm_object_table = tracking_io.read_many_processed_files(
        tracking_file_names)[storm_images.STORM_COLUMNS_NEEDED]
    print SEPARATOR_STRING

    if target_name is not None:
        print(
            'Removing storm objects without target values (variable = '
            '"{0:s}")...').format(target_name)

        these_indices = tracking_utils.find_storm_objects(
            all_storm_ids=storm_object_table[
                tracking_utils.STORM_ID_COLUMN].values.tolist(),
            all_times_unix_sec=storm_object_table[
                tracking_utils.TIME_COLUMN].values.astype(int),
            storm_ids_to_keep=target_dict[target_val_utils.STORM_IDS_KEY],
            times_to_keep_unix_sec=target_dict[
                target_val_utils.VALID_TIMES_KEY],
            allow_missing=False)

        num_storm_objects_orig = len(storm_object_table.index)
        storm_object_table = storm_object_table.iloc[these_indices]
        num_storm_objects = len(storm_object_table.index)

        print 'Removed {0:d} of {1:d} storm objects!\n'.format(
            num_storm_objects_orig - num_storm_objects, num_storm_objects_orig)

    # Extract storm-centered radar images.
    storm_images.extract_storm_images_myrorss_or_mrms(
        storm_object_table=storm_object_table,
        radar_source=radar_utils.MYRORSS_SOURCE_ID,
        top_radar_dir_name=untarred_myrorss_dir_name,
        top_output_dir_name=top_output_dir_name,
        num_storm_image_rows=num_image_rows,
        num_storm_image_columns=num_image_columns,
        rotate_grids=rotate_grids,
        rotated_grid_spacing_metres=rotated_grid_spacing_metres,
        radar_field_names=radar_field_names,
        reflectivity_heights_m_agl=refl_heights_m_agl)
    print SEPARATOR_STRING

    # Remove untarred MYRORSS files.
    myrorss_io.remove_unzipped_data_1day(
        spc_date_string=spc_date_string,
        top_directory_name=untarred_myrorss_dir_name,
        field_names=radar_field_names,
        refl_heights_m_asl=refl_heights_m_asl)
    print SEPARATOR_STRING
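
# Quick illustration of the tar-file paths implied by the format strings
# above.  The top-level directory here is a placeholder.
example_tar_file_name = '{0:s}/{1:s}/azimuthal_shear_only/{2:s}.tar'.format(
    '/data/myrorss_tarred', '20110520'[:4], '20110520')
# example_tar_file_name is now
# '/data/myrorss_tarred/2011/azimuthal_shear_only/20110520.tar'.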