def test_fields_and_refl_heights_to_dict_mrms(self):
        """Ensures correct output from fields_and_refl_heights_to_dict.

        In this case, data source is MRMS.
        """

        this_dictionary = (
            myrorss_and_mrms_utils.fields_and_refl_heights_to_dict(
                field_names=FIELD_NAMES,
                data_source=radar_utils.MRMS_SOURCE_ID,
                refl_heights_m_asl=REFLECTIVITY_HEIGHTS_M_ASL))

        self.assertEqual(this_dictionary, FIELD_TO_HEIGHTS_DICT_MRMS_M_ASL)
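
For context, a hedged sketch of the kind of inputs this test might use (the field names and heights below are illustrative assumptions, not the test module's actual constants):

FIELD_NAMES = ['reflectivity_dbz', 'reflectivity_column_max_dbz']
REFLECTIVITY_HEIGHTS_M_ASL = numpy.array([250, 500], dtype=int)

# The expected dictionary maps each field name to a 1-D numpy array of heights:
# the 3-D reflectivity field maps to REFLECTIVITY_HEIGHTS_M_ASL, while 2-D
# fields map to a single default height defined by radar_utils.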
Example 2
def unzip_1day_tar_file(
        tar_file_name, field_names, spc_date_string, top_target_directory_name,
        refl_heights_m_asl=None):
    """Unzips 1-day tar file (containing raw MYRORSS data for one SPC date).

    :param tar_file_name: Path to input file.
    :param field_names: 1-D list with names of radar fields.
    :param spc_date_string: SPC date (format "yyyymmdd").
    :param top_target_directory_name: Name of top-level directory for unzipped
        MYRORSS files.  This method will create a subdirectory therein for the
        SPC date.
    :param refl_heights_m_asl: 1-D numpy array of reflectivity heights (metres
        above sea level).
    :return: target_directory_name: Path to output directory.
    """

    # Error-checking.  The first call raises an exception if `spc_date_string`
    # is not a valid SPC date.
    _ = time_conversion.spc_date_string_to_unix_sec(spc_date_string)
    error_checking.assert_is_string_list(field_names)
    error_checking.assert_is_numpy_array(
        numpy.asarray(field_names), num_dimensions=1)
    error_checking.assert_is_string(top_target_directory_name)

    # Put azimuthal-shear fields (which are allowed to be missing) at the end.
    # This way, if the tar command errors out due to missing data, it will do so
    # after unzipping all the non-missing data.
    # Copy the list so the caller's version is not modified in place.
    field_names = list(field_names)

    field_names_removed = []
    for this_field_name in AZIMUTHAL_RADAR_FIELD_NAMES:
        if this_field_name in field_names:
            field_names.remove(this_field_name)
            field_names_removed.append(this_field_name)

    for this_field_name in field_names_removed:
        field_names.append(this_field_name)

    field_to_heights_dict_m_asl = (
        myrorss_and_mrms_utils.fields_and_refl_heights_to_dict(
            field_names=field_names, data_source=radar_utils.MYRORSS_SOURCE_ID,
            refl_heights_m_asl=refl_heights_m_asl))

    target_directory_name = '{0:s}/{1:s}/{2:s}'.format(
        top_target_directory_name, spc_date_string[:4], spc_date_string
    )

    field_names = list(field_to_heights_dict_m_asl.keys())
    directory_names_to_unzip = []

    for this_field_name in field_names:
        these_heights_m_asl = field_to_heights_dict_m_asl[this_field_name]

        for this_height_m_asl in these_heights_m_asl:
            directory_names_to_unzip.append(
                myrorss_and_mrms_io.get_relative_dir_for_raw_files(
                    field_name=this_field_name,
                    data_source=radar_utils.MYRORSS_SOURCE_ID,
                    height_m_asl=this_height_m_asl))

    unzipping.unzip_tar(
        tar_file_name,
        target_directory_name=target_directory_name,
        file_and_dir_names_to_unzip=directory_names_to_unzip)

    return target_directory_name
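
A minimal usage sketch follows; the tar-file path, output directory, field names, and heights are illustrative assumptions rather than values from the original code.

target_directory_name = unzip_1day_tar_file(
    tar_file_name='/data/myrorss_tars/20110404.tar',
    field_names=['reflectivity_dbz', 'low_level_shear_s01'],
    spc_date_string='20110404',
    top_target_directory_name='/data/myrorss_unzipped',
    refl_heights_m_asl=numpy.array([250, 500, 750], dtype=int))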
Example 3
def remove_unzipped_data_1day(
        spc_date_string, top_directory_name,
        field_names=DEFAULT_FIELDS_TO_REMOVE,
        refl_heights_m_asl=DEFAULT_REFL_HEIGHTS_TO_REMOVE_M_ASL):
    """Removes unzipped MYRORSS data for one SPC date.

    Basically, this method cleans up after unzip_1day_tar_file.

    :param spc_date_string: SPC date (format "yyyymmdd").
    :param top_directory_name: Name of top-level directory with unzipped MYRORSS
        files.  This method will find the subdirectory in `top_directory_name`
        for the given SPC date.
    :param field_names: 1-D list with names of radar fields.  Only these will be
        deleted.
    :param refl_heights_m_asl: 1-D numpy array of reflectivity heights (metres
        above sea level).
    """

    spc_date_unix_sec = time_conversion.spc_date_string_to_unix_sec(
        spc_date_string)

    field_to_heights_dict_m_asl = (
        myrorss_and_mrms_utils.fields_and_refl_heights_to_dict(
            field_names=field_names, data_source=radar_utils.MYRORSS_SOURCE_ID,
            refl_heights_m_asl=refl_heights_m_asl))

    for this_field_name in list(field_to_heights_dict_m_asl.keys()):
        these_heights_m_asl = field_to_heights_dict_m_asl[this_field_name]

        for this_height_m_asl in these_heights_m_asl:
            # This file need not exist; it is used only to locate the directory
            # for the given field and height.
            example_file_name = myrorss_and_mrms_io.find_raw_file(
                unix_time_sec=spc_date_unix_sec,
                spc_date_string=spc_date_string, field_name=this_field_name,
                data_source=radar_utils.MYRORSS_SOURCE_ID,
                top_directory_name=top_directory_name,
                height_m_asl=this_height_m_asl,
                raise_error_if_missing=False)

            example_directory_name, _ = os.path.split(example_file_name)
            directory_name_parts = example_directory_name.split('/')
            remove_all_heights = False

            if this_field_name == radar_utils.REFL_NAME:
                # If all default reflectivity heights are being removed, delete
                # the parent directory for the whole field; otherwise delete
                # only the directory for this height.
                if (set(these_heights_m_asl) ==
                        set(DEFAULT_REFL_HEIGHTS_TO_REMOVE_M_ASL)):
                    remove_all_heights = True
                    dir_name_to_remove = '/'.join(directory_name_parts[:-1])
                else:
                    dir_name_to_remove = '/'.join(directory_name_parts)

            else:
                dir_name_to_remove = '/'.join(directory_name_parts[:-1])

            if os.path.isdir(dir_name_to_remove):
                print('Removing directory "{0:s}"...'.format(
                    dir_name_to_remove))

                shutil.rmtree(dir_name_to_remove, ignore_errors=True)

            if remove_all_heights:
                break
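
A hedged usage sketch, cleaning up after the unzip step above (the directory path and field names are again illustrative assumptions):

remove_unzipped_data_1day(
    spc_date_string='20110404',
    top_directory_name='/data/myrorss_unzipped',
    field_names=['reflectivity_dbz', 'low_level_shear_s01'],
    refl_heights_m_asl=numpy.array([250, 500, 750], dtype=int))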
Example 4
def write_field_to_myrorss_file(field_matrix,
                                netcdf_file_name,
                                field_name,
                                metadata_dict,
                                height_m_asl=None):
    """Writes field to MYRORSS-formatted file.

    M = number of rows (unique grid-point latitudes)
    N = number of columns (unique grid-point longitudes)

    :param field_matrix: M-by-N numpy array with one radar variable at one time.
        Latitude should increase down each column, and longitude should increase
        to the right along each row.
    :param netcdf_file_name: Path to output file.
    :param field_name: Name of radar field in GewitterGefahr format.
    :param metadata_dict: Dictionary created by either
        `gridrad_io.read_metadata_from_full_grid_file` or
        `read_metadata_from_raw_file`.
    :param height_m_asl: Height of radar field (metres above sea level).
    """

    # For reflectivity, use the given height; for any other field, look up the
    # field's default height.
    if field_name == radar_utils.REFL_NAME:
        field_to_heights_dict_m_asl = (
            myrorss_and_mrms_utils.fields_and_refl_heights_to_dict(
                field_names=[field_name],
                data_source=radar_utils.MYRORSS_SOURCE_ID,
                refl_heights_m_asl=numpy.array([height_m_asl])))

    else:
        field_to_heights_dict_m_asl = (
            myrorss_and_mrms_utils.fields_and_refl_heights_to_dict(
                field_names=[field_name],
                data_source=radar_utils.MYRORSS_SOURCE_ID))

    # The dictionary's single key and value give the canonical field name and
    # height used in the output file.
    field_name = list(field_to_heights_dict_m_asl.keys())[0]
    radar_height_m_asl = field_to_heights_dict_m_asl[field_name][0]

    if field_name in radar_utils.ECHO_TOP_NAMES:
        # Echo tops are stored in km ASL, rather than metres, in MYRORSS files.
        field_matrix = METRES_TO_KM * field_matrix

    field_name_myrorss = radar_utils.field_name_new_to_orig(
        field_name=field_name, data_source_name=radar_utils.MYRORSS_SOURCE_ID)

    file_system_utils.mkdir_recursive_if_necessary(file_name=netcdf_file_name)
    netcdf_dataset = Dataset(netcdf_file_name,
                             'w',
                             format='NETCDF3_64BIT_OFFSET')

    netcdf_dataset.setncattr(FIELD_NAME_COLUMN_ORIG, field_name_myrorss)
    netcdf_dataset.setncattr('DataType', 'SparseLatLonGrid')

    netcdf_dataset.setncattr(
        NW_GRID_POINT_LAT_COLUMN_ORIG,
        rounder.round_to_nearest(
            metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN],
            LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(
        NW_GRID_POINT_LNG_COLUMN_ORIG,
        rounder.round_to_nearest(
            metadata_dict[radar_utils.NW_GRID_POINT_LNG_COLUMN],
            LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(HEIGHT_COLUMN_ORIG,
                             METRES_TO_KM * float(radar_height_m_asl))
    netcdf_dataset.setncattr(
        UNIX_TIME_COLUMN_ORIG,
        numpy.int32(metadata_dict[radar_utils.UNIX_TIME_COLUMN]))
    netcdf_dataset.setncattr('FractionalTime', 0.)

    netcdf_dataset.setncattr('attributes', ' ColorMap SubType Unit')
    netcdf_dataset.setncattr('ColorMap-unit', 'dimensionless')
    netcdf_dataset.setncattr('ColorMap-value', '')
    netcdf_dataset.setncattr('SubType-unit', 'dimensionless')
    netcdf_dataset.setncattr('SubType-value', float(radar_height_m_asl))
    netcdf_dataset.setncattr('Unit-unit', 'dimensionless')
    netcdf_dataset.setncattr('Unit-value', 'dimensionless')

    netcdf_dataset.setncattr(
        LAT_SPACING_COLUMN_ORIG,
        rounder.round_to_nearest(metadata_dict[radar_utils.LAT_SPACING_COLUMN],
                                 LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(
        LNG_SPACING_COLUMN_ORIG,
        rounder.round_to_nearest(metadata_dict[radar_utils.LNG_SPACING_COLUMN],
                                 LATLNG_MULTIPLE_DEG))
    netcdf_dataset.setncattr(SENTINEL_VALUE_COLUMNS_ORIG[0],
                             numpy.double(-99000.))
    netcdf_dataset.setncattr(SENTINEL_VALUE_COLUMNS_ORIG[1],
                             numpy.double(-99001.))

    # Metadata give the northwest corner of the grid; find the minimum
    # (southernmost) grid-point latitude.
    min_latitude_deg = metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN] - (
        metadata_dict[radar_utils.LAT_SPACING_COLUMN] *
        (metadata_dict[radar_utils.NUM_LAT_COLUMN] - 1))
    unique_grid_point_lats_deg, unique_grid_point_lngs_deg = (
        grids.get_latlng_grid_points(
            min_latitude_deg=min_latitude_deg,
            min_longitude_deg=metadata_dict[
                radar_utils.NW_GRID_POINT_LNG_COLUMN],
            lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
            lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN],
            num_rows=metadata_dict[radar_utils.NUM_LAT_COLUMN],
            num_columns=metadata_dict[radar_utils.NUM_LNG_COLUMN]))

    num_grid_rows = len(unique_grid_point_lats_deg)
    num_grid_columns = len(unique_grid_point_lngs_deg)
    field_vector = numpy.reshape(field_matrix,
                                 num_grid_rows * num_grid_columns)

    grid_point_lat_matrix, grid_point_lng_matrix = (
        grids.latlng_vectors_to_matrices(unique_grid_point_lats_deg,
                                         unique_grid_point_lngs_deg))
    grid_point_lat_vector = numpy.reshape(grid_point_lat_matrix,
                                          num_grid_rows * num_grid_columns)
    grid_point_lng_vector = numpy.reshape(grid_point_lng_matrix,
                                          num_grid_rows * num_grid_columns)

    # Only grid points with real (non-NaN) values are written, since MYRORSS
    # files use a sparse grid.
    real_value_indices = numpy.where(
        numpy.invert(numpy.isnan(field_vector))
    )[0]

    netcdf_dataset.createDimension(NUM_LAT_COLUMN_ORIG, num_grid_rows - 1)
    netcdf_dataset.createDimension(NUM_LNG_COLUMN_ORIG, num_grid_columns - 1)
    netcdf_dataset.createDimension(NUM_PIXELS_COLUMN_ORIG,
                                   len(real_value_indices))

    row_index_vector, column_index_vector = radar_utils.latlng_to_rowcol(
        grid_point_lat_vector,
        grid_point_lng_vector,
        nw_grid_point_lat_deg=metadata_dict[
            radar_utils.NW_GRID_POINT_LAT_COLUMN],
        nw_grid_point_lng_deg=metadata_dict[
            radar_utils.NW_GRID_POINT_LNG_COLUMN],
        lat_spacing_deg=metadata_dict[radar_utils.LAT_SPACING_COLUMN],
        lng_spacing_deg=metadata_dict[radar_utils.LNG_SPACING_COLUMN])

    netcdf_dataset.createVariable(field_name_myrorss, numpy.single,
                                  (NUM_PIXELS_COLUMN_ORIG, ))
    netcdf_dataset.createVariable(GRID_ROW_COLUMN_ORIG, numpy.int16,
                                  (NUM_PIXELS_COLUMN_ORIG, ))
    netcdf_dataset.createVariable(GRID_COLUMN_COLUMN_ORIG, numpy.int16,
                                  (NUM_PIXELS_COLUMN_ORIG, ))
    netcdf_dataset.createVariable(NUM_GRID_CELL_COLUMN_ORIG, numpy.int32,
                                  (NUM_PIXELS_COLUMN_ORIG, ))

    netcdf_dataset.variables[field_name_myrorss].setncattr(
        'BackgroundValue', numpy.int32(-99900))
    netcdf_dataset.variables[field_name_myrorss].setncattr(
        'units', 'dimensionless')
    netcdf_dataset.variables[field_name_myrorss].setncattr(
        'NumValidRuns', numpy.int32(len(real_value_indices)))

    netcdf_dataset.variables[field_name_myrorss][:] = field_vector[
        real_value_indices]
    netcdf_dataset.variables[GRID_ROW_COLUMN_ORIG][:] = (
        row_index_vector[real_value_indices])
    netcdf_dataset.variables[GRID_COLUMN_COLUMN_ORIG][:] = (
        column_index_vector[real_value_indices])
    # Each value is written as a run of length 1 (no run-length encoding of
    # consecutive identical values).
    netcdf_dataset.variables[NUM_GRID_CELL_COLUMN_ORIG][:] = numpy.full(
        len(real_value_indices), 1, dtype=int)

    netcdf_dataset.close()
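
A hedged usage sketch (the input and output paths are illustrative assumptions, and `metadata_dict` must come from one of the readers named in the docstring, assumed here to accept a file name and data source):

metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
    '/data/myrorss_unzipped/2011/20110404/some_raw_file.netcdf',
    data_source=radar_utils.MYRORSS_SOURCE_ID)

# Uniform 35-dBZ field on the grid described by `metadata_dict`.
num_rows = metadata_dict[radar_utils.NUM_LAT_COLUMN]
num_columns = metadata_dict[radar_utils.NUM_LNG_COLUMN]
field_matrix = numpy.full((num_rows, num_columns), 35.)

write_field_to_myrorss_file(
    field_matrix=field_matrix,
    netcdf_file_name='/data/myrorss_new/20110404-120000.netcdf',
    field_name=radar_utils.REFL_NAME,
    metadata_dict=metadata_dict,
    height_m_asl=1000)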