def create_latlng_grid(min_latitude_deg, max_latitude_deg, latitude_spacing_deg, min_longitude_deg, max_longitude_deg, longitude_spacing_deg):
    """Creates a regular latitude-longitude grid.

    M = number of rows (unique grid-point latitudes)
    N = number of columns (unique grid-point longitudes)

    :param min_latitude_deg: Minimum latitude (deg N) in grid.
    :param max_latitude_deg: Max latitude (deg N) in grid.
    :param latitude_spacing_deg: Spacing (deg N) between grid points in
        adjacent rows.
    :param min_longitude_deg: Minimum longitude (deg E) in grid.
    :param max_longitude_deg: Max longitude (deg E) in grid.
    :param longitude_spacing_deg: Spacing (deg E) between grid points in
        adjacent columns.
    :return: grid_point_latitudes_deg: length-M numpy array with latitudes
        (deg N) of grid points.
    :return: grid_point_longitudes_deg: length-N numpy array with longitudes
        (deg E) of grid points.
    """

    # TODO(thunderhoser): Make this handle wrap-around issues.

    # Normalize both longitudes to the positive-in-west convention before any
    # arithmetic on them.
    min_longitude_deg = longitude_conv.convert_lng_positive_in_west(
        min_longitude_deg)
    max_longitude_deg = longitude_conv.convert_lng_positive_in_west(
        max_longitude_deg)

    # Snap the bounding box *outwards* to multiples of the grid spacing, so
    # the requested limits always lie inside the grid.
    min_latitude_deg = number_rounding.floor_to_nearest(
        min_latitude_deg, latitude_spacing_deg)
    max_latitude_deg = number_rounding.ceiling_to_nearest(
        max_latitude_deg, latitude_spacing_deg)
    min_longitude_deg = number_rounding.floor_to_nearest(
        min_longitude_deg, longitude_spacing_deg)
    max_longitude_deg = number_rounding.ceiling_to_nearest(
        max_longitude_deg, longitude_spacing_deg)

    latitude_extent_deg = max_latitude_deg - min_latitude_deg
    longitude_extent_deg = max_longitude_deg - min_longitude_deg

    # Round before casting to int, so floating-point error in the quotient
    # cannot change the point count.
    num_grid_rows = int(numpy.round(
        latitude_extent_deg / latitude_spacing_deg)) + 1
    num_grid_columns = int(numpy.round(
        longitude_extent_deg / longitude_spacing_deg)) + 1

    return grids.get_latlng_grid_points(
        min_latitude_deg=min_latitude_deg,
        min_longitude_deg=min_longitude_deg,
        lat_spacing_deg=latitude_spacing_deg,
        lng_spacing_deg=longitude_spacing_deg,
        num_rows=num_grid_rows, num_columns=num_grid_columns)
def plot_parallels(basemap_object, axes_object, min_latitude_deg=None, max_latitude_deg=None, num_parallels=DEFAULT_NUM_PARALLELS, line_width=DEFAULT_GRID_LINE_WIDTH, line_colour=DEFAULT_GRID_LINE_COLOUR, z_order=DEFAULT_GRID_LINE_Z_ORDER):
    """Plots parallels (grid lines for latitude).

    If `min_latitude_deg` and `max_latitude_deg` are both None, this method
    will take plotting limits from `basemap_object`.

    :param basemap_object: See doc for `plot_countries`.
    :param axes_object: Same.
    :param min_latitude_deg: Minimum latitude for grid lines.
    :param max_latitude_deg: Max latitude for grid lines.
    :param num_parallels: Number of parallels.
    :param line_width: See doc for `plot_countries`.
    :param line_colour: Same.
    :param z_order: Same.
    """

    # Fall back on the map's own corner latitudes when limits are missing.
    if min_latitude_deg is None or max_latitude_deg is None:
        min_latitude_deg = basemap_object.llcrnrlat
        max_latitude_deg = basemap_object.urcrnrlat

    error_checking.assert_is_valid_latitude(min_latitude_deg)
    error_checking.assert_is_valid_latitude(max_latitude_deg)
    error_checking.assert_is_greater(max_latitude_deg, min_latitude_deg)
    error_checking.assert_is_integer(num_parallels)
    error_checking.assert_is_geq(num_parallels, 2)

    raw_spacing_deg = (
        (max_latitude_deg - min_latitude_deg) / (num_parallels - 1)
    )

    # Snap the spacing to a "nice" value: nearest 0.1 deg for sub-degree
    # spacings, nearest whole degree otherwise.
    if raw_spacing_deg < 1.:
        parallel_spacing_deg = number_rounding.round_to_nearest(
            raw_spacing_deg, 0.1)
    else:
        parallel_spacing_deg = numpy.round(raw_spacing_deg)

    # Shrink the limits *inwards* to multiples of the snapped spacing, then
    # recompute how many parallels actually fit.
    min_latitude_deg = number_rounding.ceiling_to_nearest(
        min_latitude_deg, parallel_spacing_deg)
    max_latitude_deg = number_rounding.floor_to_nearest(
        max_latitude_deg, parallel_spacing_deg)
    num_parallels = 1 + int(numpy.round(
        (max_latitude_deg - min_latitude_deg) / parallel_spacing_deg
    ))

    latitudes_deg = numpy.linspace(
        min_latitude_deg, max_latitude_deg, num=num_parallels)

    basemap_object.drawparallels(
        latitudes_deg,
        color=colour_from_numpy_to_tuple(line_colour),
        linewidth=line_width,
        labels=[True, False, False, False],  # label left edge only
        ax=axes_object, zorder=z_order)
def test_ceiling_to_nearest_array(self):
    """Ensures correct output from ceiling_to_nearest with array input."""

    these_ceiling_values = number_rounding.ceiling_to_nearest(
        INPUT_VALUES, ROUNDING_BASE)

    self.assertTrue(numpy.allclose(
        these_ceiling_values, EXPECTED_CEILING_VALUES, atol=TOLERANCE
    ))
def test_ceiling_to_nearest_scalar(self):
    """Ensures correct output from ceiling_to_nearest with scalar input."""

    this_ceiling_value = number_rounding.ceiling_to_nearest(
        INPUT_VALUE_SCALAR, ROUNDING_BASE)

    self.assertTrue(numpy.isclose(
        this_ceiling_value, EXPECTED_CEILING_OF_SCALAR, atol=TOLERANCE
    ))
def range_and_interval_to_list(start_time_unix_sec=None, end_time_unix_sec=None, time_interval_sec=None, include_endpoint=True):
    """Converts time period from range and interval to list of exact times.

    N = number of exact times

    :param start_time_unix_sec: Start time (Unix format).
    :param end_time_unix_sec: End time (Unix format).
    :param time_interval_sec: Interval (seconds) between successive exact
        times.  NOTE(review): assumed positive; not explicitly validated here
        — confirm callers guarantee this.
    :param include_endpoint: Boolean flag.  If True, endpoint will be included
        in list of time steps.  If False, endpoint will be excluded.
    :return: unix_times_sec: length-N numpy array of exact times (Unix
        format).
    """

    error_checking.assert_is_integer(start_time_unix_sec)
    error_checking.assert_is_not_nan(start_time_unix_sec)
    error_checking.assert_is_integer(end_time_unix_sec)
    error_checking.assert_is_not_nan(end_time_unix_sec)
    error_checking.assert_is_integer(time_interval_sec)
    error_checking.assert_is_boolean(include_endpoint)

    # Equal start and end times are allowed only when the endpoint is kept.
    if include_endpoint:
        error_checking.assert_is_geq(end_time_unix_sec, start_time_unix_sec)
    else:
        error_checking.assert_is_greater(
            end_time_unix_sec, start_time_unix_sec)

    # Snap the range outwards to multiples of the interval.
    start_time_unix_sec = int(rounder.floor_to_nearest(
        float(start_time_unix_sec), time_interval_sec
    ))
    end_time_unix_sec = int(rounder.ceiling_to_nearest(
        float(end_time_unix_sec), time_interval_sec
    ))
    if not include_endpoint:
        end_time_unix_sec -= time_interval_sec

    num_time_steps = 1 + int(numpy.round(
        (end_time_unix_sec - start_time_unix_sec) / time_interval_sec
    ))

    return numpy.linspace(
        start_time_unix_sec, end_time_unix_sec, num=num_time_steps, dtype=int)
def plot_meridians(basemap_object=None, axes_object=None, bottom_left_lng_deg=None, upper_right_lng_deg=None, meridian_spacing_deg=DEFAULT_MERIDIAN_SPACING_DEG, line_width=DEFAULT_PARALLEL_MERIDIAN_WIDTH, line_colour=DEFAULT_PARALLEL_MERIDIAN_COLOUR):
    """Draws meridians (lines of equal longitude).

    :param basemap_object: Instance of `mpl_toolkits.basemap.Basemap`.
    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param bottom_left_lng_deg: Longitude at bottom-left corner (deg E).
    :param upper_right_lng_deg: Longitude at upper-right corner (deg E).
    :param meridian_spacing_deg: Spacing between successive meridians (deg E).
    :param line_width: Line width (real positive number).
    :param line_colour: Colour (in any format accepted by
        `matplotlib.colors`).
    """

    # Normalize both longitudes to the positive-in-west convention so the
    # min/max comparison below is meaningful.
    bottom_left_lng_deg = lng_conversion.convert_lng_positive_in_west(
        bottom_left_lng_deg)
    upper_right_lng_deg = lng_conversion.convert_lng_positive_in_west(
        upper_right_lng_deg)

    error_checking.assert_is_greater(upper_right_lng_deg, bottom_left_lng_deg)
    error_checking.assert_is_greater(meridian_spacing_deg, 0)

    # Shrink the limits inwards to multiples of the spacing.
    min_meridian_deg = rounder.ceiling_to_nearest(
        bottom_left_lng_deg, meridian_spacing_deg)
    max_meridian_deg = rounder.floor_to_nearest(
        upper_right_lng_deg, meridian_spacing_deg)

    # BUG FIX: round the float quotient before casting to int.  The rounding
    # helpers return floats, so a bare int() cast truncates and could drop the
    # last meridian when the quotient comes out as e.g. 2.9999999.
    num_meridians = 1 + int(numpy.round(
        (max_meridian_deg - min_meridian_deg) / meridian_spacing_deg
    ))
    meridians_deg = numpy.linspace(
        min_meridian_deg, max_meridian_deg, num=num_meridians)

    basemap_object.drawmeridians(
        meridians_deg, color=line_colour, linewidth=line_width,
        labels=[False, False, False, True], ax=axes_object,
        zorder=Z_ORDER_MERIDIANS_AND_PARALLELS)
def plot_parallels(basemap_object=None, axes_object=None, bottom_left_lat_deg=None, upper_right_lat_deg=None, parallel_spacing_deg=DEFAULT_PARALLEL_SPACING_DEG, line_width=DEFAULT_PARALLEL_MERIDIAN_WIDTH, line_colour=DEFAULT_PARALLEL_MERIDIAN_COLOUR):
    """Draws parallels (lines of equal latitude).

    :param basemap_object: Instance of `mpl_toolkits.basemap.Basemap`.
    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param bottom_left_lat_deg: Latitude at bottom-left corner (deg N).
    :param upper_right_lat_deg: Latitude at upper-right corner (deg N).
    :param parallel_spacing_deg: Spacing between successive parallels (deg N).
    :param line_width: Line width (real positive number).
    :param line_colour: Colour (in any format accepted by
        `matplotlib.colors`).
    """

    error_checking.assert_is_valid_latitude(bottom_left_lat_deg)
    error_checking.assert_is_valid_latitude(upper_right_lat_deg)
    error_checking.assert_is_greater(upper_right_lat_deg, bottom_left_lat_deg)
    error_checking.assert_is_greater(parallel_spacing_deg, 0)

    # Shrink the limits inwards to multiples of the spacing.
    min_parallel_deg = rounder.ceiling_to_nearest(
        bottom_left_lat_deg, parallel_spacing_deg)
    max_parallel_deg = rounder.floor_to_nearest(
        upper_right_lat_deg, parallel_spacing_deg)

    # BUG FIX: round the float quotient before casting to int.  The rounding
    # helpers return floats, so a bare int() cast truncates and could drop the
    # last parallel when the quotient comes out slightly below the true
    # integer (e.g. 2.9999999).
    num_parallels = 1 + int(numpy.round(
        (max_parallel_deg - min_parallel_deg) / parallel_spacing_deg
    ))
    parallels_deg = numpy.linspace(
        min_parallel_deg, max_parallel_deg, num=num_parallels)

    basemap_object.drawparallels(
        parallels_deg, color=line_colour, linewidth=line_width,
        labels=[True, False, False, False], ax=axes_object,
        zorder=Z_ORDER_MERIDIANS_AND_PARALLELS)
def get_times_needed_for_interp(query_times_unix_sec=None, model_time_step_hours=None, method_string=None):
    """Finds model times needed for interpolation to each range of query times.

    Q = number of query times
    M = number of model times needed

    :param query_times_unix_sec: length-Q numpy array of query times (Unix
        format).
    :param model_time_step_hours: Model time step.  If interpolating between
        forecast times (from the same initialization), this should be the
        model's time resolution (hours between successive forecasts).  If
        interpolating between model runs (forecasts for the same valid time
        but from different initializations), this should be the model's
        refresh time (hours between successive model runs).
    :param method_string: Interpolation method.  Valid options are "previous",
        "next", "linear", "nearest", "zero", "slinear", "quadratic", and
        "cubic".  The last 6 methods are described in the documentation for
        `scipy.interpolate.interp1d`.
    :return: model_times_unix_sec: length-M numpy array of model times needed
        (Unix format).
    :return: query_to_model_times_table: pandas DataFrame with the following
        columns.  Each row corresponds to one range of query times.
    query_to_model_times_table.min_query_time_unix_sec: Minimum query time for
        this range.
    query_to_model_times_table.max_query_time_unix_sec: Max query time for
        this range.
    query_to_model_times_table.model_times_unix_sec: 1-D numpy array of model
        times needed for this range.
    query_to_model_times_table.model_time_needed_flags: length-M numpy array
        of Boolean flags.  If model_time_needed_flags[i] = True at row j, this
        means the [i]th model time is needed for interp to the [j]th range of
        query times.
    """

    error_checking.assert_is_integer_numpy_array(query_times_unix_sec)
    error_checking.assert_is_numpy_array_without_nan(query_times_unix_sec)
    error_checking.assert_is_numpy_array(
        query_times_unix_sec, num_dimensions=1)
    model_time_step_hours = int(numpy.round(model_time_step_hours))
    error_checking.assert_is_string(method_string)

    model_time_step_sec = model_time_step_hours * HOURS_TO_SECONDS

    # Snap the query-time range outwards to multiples of the model time step.
    min_min_query_time_unix_sec = rounder.floor_to_nearest(
        float(numpy.min(query_times_unix_sec)), model_time_step_sec)
    max_max_query_time_unix_sec = rounder.ceiling_to_nearest(
        float(numpy.max(query_times_unix_sec)), model_time_step_sec)

    # Guarantee at least one range, even when all query times round to the
    # same model time.
    if max_max_query_time_unix_sec == min_min_query_time_unix_sec:
        max_max_query_time_unix_sec += model_time_step_sec

    # BUG FIX: round the float quotient before casting to int.  The rounding
    # helpers return floats, so a bare int() cast could truncate a
    # 2.9999999-style quotient and drop the last range.
    num_ranges = int(numpy.round(
        (max_max_query_time_unix_sec - min_min_query_time_unix_sec) /
        model_time_step_sec
    ))

    min_query_times_unix_sec = numpy.linspace(
        min_min_query_time_unix_sec,
        max_max_query_time_unix_sec - model_time_step_sec,
        num=num_ranges, dtype=int)
    max_query_times_unix_sec = numpy.linspace(
        min_min_query_time_unix_sec + model_time_step_sec,
        max_max_query_time_unix_sec,
        num=num_ranges, dtype=int)

    # The span of model times needed depends on the interp method: "previous"
    # needs no model time after the last range, "next" needs none before the
    # first range, and superlinear methods need one extra model time on each
    # side.  (Plain int assignment replaces the original copy.deepcopy calls,
    # which are pointless on immutable ints.)
    if method_string == PREVIOUS_INTERP_METHOD:
        min_model_time_unix_sec = min_min_query_time_unix_sec
        max_model_time_unix_sec = (
            max_max_query_time_unix_sec - model_time_step_sec)
    elif method_string == NEXT_INTERP_METHOD:
        min_model_time_unix_sec = (
            min_min_query_time_unix_sec + model_time_step_sec)
        max_model_time_unix_sec = max_max_query_time_unix_sec
    elif method_string in SUPERLINEAR_INTERP_METHODS:
        min_model_time_unix_sec = (
            min_min_query_time_unix_sec - model_time_step_sec)
        max_model_time_unix_sec = (
            max_max_query_time_unix_sec + model_time_step_sec)
    else:
        min_model_time_unix_sec = min_min_query_time_unix_sec
        max_model_time_unix_sec = max_max_query_time_unix_sec

    # BUG FIX: same round-before-int fix as for num_ranges above.
    num_model_times = 1 + int(numpy.round(
        (max_model_time_unix_sec - min_model_time_unix_sec) /
        model_time_step_sec
    ))
    model_times_unix_sec = numpy.linspace(
        min_model_time_unix_sec, max_model_time_unix_sec,
        num=num_model_times, dtype=int)

    query_to_model_times_dict = {
        MIN_QUERY_TIME_COLUMN: min_query_times_unix_sec,
        MAX_QUERY_TIME_COLUMN: max_query_times_unix_sec
    }
    query_to_model_times_table = pandas.DataFrame.from_dict(
        query_to_model_times_dict)

    # Project idiom for creating columns whose cells hold arrays ("nested
    # arrays"): seed the column with a two-column list, then overwrite each
    # cell in the loop below.
    nested_array = query_to_model_times_table[[
        MIN_QUERY_TIME_COLUMN, MIN_QUERY_TIME_COLUMN
    ]].values.tolist()

    argument_dict = {
        MODEL_TIMES_COLUMN: nested_array,
        MODEL_TIMES_NEEDED_COLUMN: nested_array
    }
    query_to_model_times_table = query_to_model_times_table.assign(
        **argument_dict)

    for i in range(num_ranges):
        if method_string == PREVIOUS_INTERP_METHOD:
            these_model_times_unix_sec = numpy.array(
                [min_query_times_unix_sec[i]], dtype=int)
        elif method_string == NEXT_INTERP_METHOD:
            these_model_times_unix_sec = numpy.array(
                [max_query_times_unix_sec[i]], dtype=int)
        elif method_string in SUPERLINEAR_INTERP_METHODS:
            these_model_times_unix_sec = numpy.array([
                min_query_times_unix_sec[i] - model_time_step_sec,
                min_query_times_unix_sec[i],
                max_query_times_unix_sec[i],
                max_query_times_unix_sec[i] + model_time_step_sec
            ], dtype=int)
        else:
            these_model_times_unix_sec = numpy.array(
                [min_query_times_unix_sec[i], max_query_times_unix_sec[i]],
                dtype=int)

        query_to_model_times_table[MODEL_TIMES_COLUMN].values[
            i] = these_model_times_unix_sec

        # CONSISTENCY FIX: store a numpy Boolean array, as the docstring
        # promises (the original stored a Python list here).
        query_to_model_times_table[MODEL_TIMES_NEEDED_COLUMN].values[i] = (
            numpy.array([
                t in these_model_times_unix_sec for t in model_times_unix_sec
            ], dtype=bool))

    return model_times_unix_sec, query_to_model_times_table
def plot_histogram(
        input_values, num_bins, min_value, max_value, axes_object,
        x_tick_spacing_num_bins, y_tick_spacing=None,
        bar_face_colour=DEFAULT_HISTOGRAM_FACE_COLOUR,
        bar_edge_colour=DEFAULT_HISTOGRAM_EDGE_COLOUR,
        bar_edge_width=DEFAULT_HISTOGRAM_EDGE_WIDTH):
    """Plots histogram (as a bar chart of per-bin frequencies).

    :param input_values: See documentation for `histograms.create_histogram`.
    :param num_bins: See documentation for `histograms.create_histogram`.
    :param min_value: See documentation for `histograms.create_histogram`.
    :param max_value: See documentation for `histograms.create_histogram`.
    :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`.
    :param x_tick_spacing_num_bins: Spacing between adjacent tick marks on
        x-axis, in terms of # bins.
    :param y_tick_spacing: Spacing between adjacent tick marks on y-axis, in
        terms of frequency.  If None, y-ticks/limits are left untouched.
    :param bar_face_colour: Colour (in any format accepted by
        `matplotlib.colors`) for interior of each bar.
    :param bar_edge_colour: Colour for edge of each bar.
    :param bar_edge_width: Width for edge of each bar.
    """

    # TODO(thunderhoser): Make input args nicer, especially `y_tick_spacing`.

    error_checking.assert_is_integer(x_tick_spacing_num_bins)
    error_checking.assert_is_greater(x_tick_spacing_num_bins, 0)

    _, num_examples_by_bin = histograms.create_histogram(
        input_values=input_values, num_bins=num_bins, min_value=min_value,
        max_value=max_value)

    # Convert raw counts to fractions of the total, so the y-axis reads as
    # frequency.
    fraction_of_examples_by_bin = (
        num_examples_by_bin.astype(float) / numpy.sum(num_examples_by_bin)
    )

    # Equally spaced bin edges over [min_value, max_value]; bars are drawn at
    # bin centers with full bin width.
    bin_edges = numpy.linspace(min_value, max_value, num=num_bins + 1)
    bin_width = bin_edges[1] - bin_edges[0]
    bin_centers = bin_edges[:-1] + bin_width / 2

    axes_object.bar(
        bin_centers, fraction_of_examples_by_bin, bin_width,
        color=plotting_utils.colour_from_numpy_to_tuple(bar_face_colour),
        edgecolor=plotting_utils.colour_from_numpy_to_tuple(bar_edge_colour),
        linewidth=bar_edge_width)

    # NOTE(review): the arange stop of `num_bins - 1` already excludes the
    # last bin index, which makes the `< num_bins` filter on the next line
    # redundant — possibly the stop was meant to be `num_bins`.  Confirm the
    # intended tick coverage before changing.
    x_tick_indices = numpy.arange(
        0, num_bins - 1, step=x_tick_spacing_num_bins, dtype=int)
    x_tick_indices = x_tick_indices[x_tick_indices < num_bins]

    x_tick_values = bin_centers[x_tick_indices]
    pyplot.xticks(x_tick_values, axes=axes_object)
    axes_object.set_xlim(bin_edges[0], bin_edges[-1])

    if y_tick_spacing is not None:
        error_checking.assert_is_greater(y_tick_spacing, 0.)

        # Extend the top y-limit up to a multiple of the tick spacing, then
        # place evenly spaced ticks from 0 to that limit.
        max_y_tick_value = rounder.ceiling_to_nearest(
            numpy.max(fraction_of_examples_by_bin), y_tick_spacing)
        num_y_ticks = 1 + int(numpy.round(max_y_tick_value / y_tick_spacing))
        y_tick_values = numpy.linspace(0., max_y_tick_value, num=num_y_ticks)

        pyplot.yticks(y_tick_values, axes=axes_object)
        axes_object.set_ylim(0., max_y_tick_value)
def plot_meridians(basemap_object, axes_object, min_longitude_deg=None, max_longitude_deg=None, num_meridians=DEFAULT_NUM_MERIDIANS, line_width=DEFAULT_GRID_LINE_WIDTH, line_colour=DEFAULT_GRID_LINE_COLOUR, z_order=DEFAULT_GRID_LINE_Z_ORDER):
    """Plots meridians (grid lines for longitude).

    If `min_longitude_deg` and `max_longitude_deg` are both None, this method
    will take plotting limits from `basemap_object`.

    :param basemap_object: See doc for `plot_countries`.
    :param axes_object: Same.
    :param min_longitude_deg: Minimum longitude for grid lines.
    :param max_longitude_deg: Max longitude for grid lines.
    :param num_meridians: Number of meridians.
    :param line_width: See doc for `plot_countries`.
    :param line_colour: Same.
    :param z_order: Same.
    """

    # Fall back on the map's own corner longitudes when limits are missing.
    if min_longitude_deg is None or max_longitude_deg is None:
        min_longitude_deg = basemap_object.llcrnrlon
        max_longitude_deg = basemap_object.urcrnrlon

    # Normalize both longitudes to the positive-in-west convention before
    # comparing them.
    min_longitude_deg = lng_conversion.convert_lng_positive_in_west(
        min_longitude_deg)
    max_longitude_deg = lng_conversion.convert_lng_positive_in_west(
        max_longitude_deg)

    error_checking.assert_is_greater(max_longitude_deg, min_longitude_deg)
    error_checking.assert_is_integer(num_meridians)
    error_checking.assert_is_geq(num_meridians, 2)

    raw_spacing_deg = (
        (max_longitude_deg - min_longitude_deg) / (num_meridians - 1)
    )

    # Snap the spacing to a "nice" value: nearest 0.1 deg for sub-degree
    # spacings, nearest whole degree otherwise.
    if raw_spacing_deg < 1.:
        meridian_spacing_deg = number_rounding.round_to_nearest(
            raw_spacing_deg, 0.1)
    else:
        meridian_spacing_deg = numpy.round(raw_spacing_deg)

    # Shrink the limits *inwards* to multiples of the snapped spacing, then
    # recompute how many meridians actually fit.
    min_longitude_deg = number_rounding.ceiling_to_nearest(
        min_longitude_deg, meridian_spacing_deg)
    max_longitude_deg = number_rounding.floor_to_nearest(
        max_longitude_deg, meridian_spacing_deg)
    num_meridians = 1 + int(numpy.round(
        (max_longitude_deg - min_longitude_deg) / meridian_spacing_deg
    ))

    longitudes_deg = numpy.linspace(
        min_longitude_deg, max_longitude_deg, num=num_meridians)

    basemap_object.drawmeridians(
        longitudes_deg,
        color=colour_from_numpy_to_tuple(line_colour),
        linewidth=line_width,
        labels=[False, False, False, True],  # label bottom edge only
        ax=axes_object, zorder=z_order)
def get_times_needed_for_interp(query_times_unix_sec, model_time_step_hours, method_string):
    """Finds model times needed for interp to each query time.

    Q = number of query times
    M = number of model times needed

    :param query_times_unix_sec: length-Q numpy array of query times.
    :param model_time_step_hours: Model time step.  If interpolating between
        forecast times from the same run, this should be time between
        successive forecasts.  If interpolating between model runs, this
        should be time between successive model runs.
    :param method_string: Interpolation method.
    :return: model_times_unix_sec: length-M numpy array of model times needed.
    :return: query_to_model_times_table: pandas DataFrame with the following
        columns.  Each row corresponds to a range of query times.
    query_to_model_times_table.min_query_time_unix_sec: Earliest query time in
        range.
    query_to_model_times_table.max_query_time_unix_sec: Latest query time in
        range.
    query_to_model_times_table.model_times_unix_sec: 1-D numpy array of model
        times needed for this range.
    query_to_model_times_table.model_time_needed_flags: length-M numpy array
        of Boolean flags.  If model_time_needed_flags[j] = True,
        model_times_unix_sec[j] -- or the [j]th element in the first output --
        is needed for this range of query times.
    """

    error_checking.assert_is_integer_numpy_array(query_times_unix_sec)
    error_checking.assert_is_numpy_array(
        query_times_unix_sec, num_dimensions=1)
    error_checking.assert_is_string(method_string)

    model_time_step_hours = int(numpy.round(model_time_step_hours))
    model_time_step_sec = model_time_step_hours * HOURS_TO_SECONDS

    # Snap the query-time range outwards to multiples of the model time step.
    min_min_query_time_unix_sec = rounder.floor_to_nearest(
        float(numpy.min(query_times_unix_sec)), model_time_step_sec)
    max_max_query_time_unix_sec = rounder.ceiling_to_nearest(
        float(numpy.max(query_times_unix_sec)), model_time_step_sec)

    # Guarantee at least one range, even when all query times round to the
    # same model time.
    if max_max_query_time_unix_sec == min_min_query_time_unix_sec:
        max_max_query_time_unix_sec += model_time_step_sec

    num_ranges = int(numpy.round(
        (max_max_query_time_unix_sec - min_min_query_time_unix_sec) /
        model_time_step_sec))

    # [min, max] pairs for each consecutive range of query times.
    min_query_times_unix_sec = numpy.linspace(
        min_min_query_time_unix_sec,
        max_max_query_time_unix_sec - model_time_step_sec,
        num=num_ranges, dtype=int)
    max_query_times_unix_sec = numpy.linspace(
        min_min_query_time_unix_sec + model_time_step_sec,
        max_max_query_time_unix_sec,
        num=num_ranges, dtype=int)

    # The span of model times needed depends on the interp method: "previous"
    # needs no model time after the last range, "next" needs none before the
    # first range, and superlinear methods need one extra model time on each
    # side.  (`+ 0` leaves the value unchanged; presumably used to emphasize
    # that a fresh value is assigned.)
    if method_string == PREVIOUS_INTERP_METHOD:
        min_model_time_unix_sec = min_min_query_time_unix_sec + 0
        max_model_time_unix_sec = (
            max_max_query_time_unix_sec - model_time_step_sec)
    elif method_string == NEXT_INTERP_METHOD:
        min_model_time_unix_sec = (
            min_min_query_time_unix_sec + model_time_step_sec)
        max_model_time_unix_sec = max_max_query_time_unix_sec + 0
    elif method_string in SUPERLINEAR_INTERP_METHODS:
        min_model_time_unix_sec = (
            min_min_query_time_unix_sec - model_time_step_sec)
        max_model_time_unix_sec = (
            max_max_query_time_unix_sec + model_time_step_sec)
    else:
        min_model_time_unix_sec = min_min_query_time_unix_sec + 0
        max_model_time_unix_sec = max_max_query_time_unix_sec + 0

    num_model_times = 1 + int(numpy.round(
        (max_model_time_unix_sec - min_model_time_unix_sec) /
        model_time_step_sec))

    model_times_unix_sec = numpy.linspace(
        min_model_time_unix_sec, max_model_time_unix_sec,
        num=num_model_times, dtype=int)

    query_to_model_times_dict = {
        MIN_QUERY_TIME_COLUMN: min_query_times_unix_sec,
        MAX_QUERY_TIME_COLUMN: max_query_times_unix_sec
    }
    query_to_model_times_table = pandas.DataFrame.from_dict(
        query_to_model_times_dict)

    # Project idiom for creating columns whose cells hold arrays ("nested
    # arrays"): seed the column with a two-column list, then overwrite each
    # cell in the loop below.
    nested_array = query_to_model_times_table[[
        MIN_QUERY_TIME_COLUMN, MIN_QUERY_TIME_COLUMN
    ]].values.tolist()

    query_to_model_times_table = query_to_model_times_table.assign(**{
        MODEL_TIMES_COLUMN: nested_array,
        MODEL_TIMES_NEEDED_COLUMN: nested_array
    })

    for i in range(num_ranges):
        # Pick the model times bracketing the [i]th query range.  Note that
        # `x[[i]]` keeps a length-1 array rather than extracting a scalar.
        if method_string == PREVIOUS_INTERP_METHOD:
            these_model_times_unix_sec = min_query_times_unix_sec[[i]]
        elif method_string == NEXT_INTERP_METHOD:
            these_model_times_unix_sec = max_query_times_unix_sec[[i]]
        elif method_string in SUPERLINEAR_INTERP_METHODS:
            these_model_times_unix_sec = numpy.array([
                min_query_times_unix_sec[i] - model_time_step_sec,
                min_query_times_unix_sec[i],
                max_query_times_unix_sec[i],
                max_query_times_unix_sec[i] + model_time_step_sec
            ], dtype=int)
        else:
            these_model_times_unix_sec = numpy.array(
                [min_query_times_unix_sec[i], max_query_times_unix_sec[i]],
                dtype=int)

        query_to_model_times_table[MODEL_TIMES_COLUMN].values[i] = (
            these_model_times_unix_sec)

        # Flag which of the M global model times this range needs.
        query_to_model_times_table[MODEL_TIMES_NEEDED_COLUMN].values[i] = (
            numpy.array([
                t in these_model_times_unix_sec for t in model_times_unix_sec
            ], dtype=bool))

    return model_times_unix_sec, query_to_model_times_table
def read_metadata_from_raw_file(netcdf_file_name, data_source, raise_error_if_fails=True):
    """Reads metadata from raw (either MYRORSS or MRMS) file.

    This file should contain one radar field at one height and valid time.

    :param netcdf_file_name: Path to input file.
    :param data_source: Data source (string).
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be
        read, this method will raise an error.  If False and file cannot be
        read, will return None.
    :return: metadata_dict: Dictionary with the following keys.
    metadata_dict['nw_grid_point_lat_deg']: Latitude (deg N) of
        northwesternmost grid point.
    metadata_dict['nw_grid_point_lng_deg']: Longitude (deg E) of
        northwesternmost grid point.
    metadata_dict['lat_spacing_deg']: Spacing (deg N) between meridionally
        adjacent grid points.
    metadata_dict['lng_spacing_deg']: Spacing (deg E) between zonally adjacent
        grid points.
    metadata_dict['num_lat_in_grid']: Number of rows (unique grid-point
        latitudes).
    metadata_dict['num_lng_in_grid']: Number of columns (unique grid-point
        longitudes).
    metadata_dict['height_m_asl']: Radar height (metres above sea level, going
        by the `_m_asl` key name; the original doc said "above ground level"
        -- confirm which is correct).
    metadata_dict['unix_time_sec']: Valid time.
    metadata_dict['field_name']: Name of radar field in GewitterGefahr format.
    metadata_dict['field_name_orig']: Name of radar field in original (either
        MYRORSS or MRMS) format.
    metadata_dict['sentinel_values']: 1-D numpy array of sentinel values.
    """

    error_checking.assert_file_exists(netcdf_file_name)
    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)

    # open_netcdf returns None (rather than raising) on failure when
    # raise_error_if_fails is False.
    if netcdf_dataset is None:
        return None

    field_name_orig = str(getattr(netcdf_dataset, FIELD_NAME_COLUMN_ORIG))

    metadata_dict = {
        radar_utils.NW_GRID_POINT_LAT_COLUMN:
            getattr(netcdf_dataset, NW_GRID_POINT_LAT_COLUMN_ORIG),
        radar_utils.NW_GRID_POINT_LNG_COLUMN:
            lng_conversion.convert_lng_positive_in_west(getattr(
                netcdf_dataset, NW_GRID_POINT_LNG_COLUMN_ORIG),
                allow_nan=False),
        radar_utils.LAT_SPACING_COLUMN:
            getattr(netcdf_dataset, LAT_SPACING_COLUMN_ORIG),
        radar_utils.LNG_SPACING_COLUMN:
            getattr(netcdf_dataset, LNG_SPACING_COLUMN_ORIG),
        # NOTE(review): the stored dimension sizes are incremented by one --
        # presumably the raw files record one fewer than the true grid-point
        # count; confirm against the MYRORSS/MRMS format spec.
        radar_utils.NUM_LAT_COLUMN:
            netcdf_dataset.dimensions[NUM_LAT_COLUMN_ORIG].size + 1,
        radar_utils.NUM_LNG_COLUMN:
            netcdf_dataset.dimensions[NUM_LNG_COLUMN_ORIG].size + 1,
        radar_utils.HEIGHT_COLUMN:
            getattr(netcdf_dataset, HEIGHT_COLUMN_ORIG),
        radar_utils.UNIX_TIME_COLUMN:
            getattr(netcdf_dataset, UNIX_TIME_COLUMN_ORIG),
        FIELD_NAME_COLUMN_ORIG: field_name_orig,
        radar_utils.FIELD_NAME_COLUMN:
            radar_utils.field_name_orig_to_new(
                field_name_orig=field_name_orig,
                data_source_name=data_source)
    }

    latitude_spacing_deg = metadata_dict[radar_utils.LAT_SPACING_COLUMN]
    longitude_spacing_deg = metadata_dict[radar_utils.LNG_SPACING_COLUMN]

    # TODO(thunderhoser): The following "if" condition is a hack.  The purpose
    # is to change grid corners only for actual MYRORSS data, not GridRad data
    # in MYRORSS format.  (Actual MYRORSS grids have spacing < 0.011 deg in
    # both directions.)
    if latitude_spacing_deg < 0.011 and longitude_spacing_deg < 0.011:
        # Snap the NW corner to multiples of the grid spacing (floor the
        # latitude, ceiling the longitude).
        metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN] = (
            rounder.floor_to_nearest(
                metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN],
                metadata_dict[radar_utils.LAT_SPACING_COLUMN]))
        metadata_dict[radar_utils.NW_GRID_POINT_LNG_COLUMN] = (
            rounder.ceiling_to_nearest(
                metadata_dict[radar_utils.NW_GRID_POINT_LNG_COLUMN],
                metadata_dict[radar_utils.LNG_SPACING_COLUMN]))

    # Collect every sentinel value stored as a global attribute.
    sentinel_values = []
    for this_column in SENTINEL_VALUE_COLUMNS_ORIG:
        sentinel_values.append(getattr(netcdf_dataset, this_column))

    metadata_dict.update(
        {radar_utils.SENTINEL_VALUE_COLUMN: numpy.array(sentinel_values)})

    netcdf_dataset.close()
    return metadata_dict
def _plot_tornado_and_radar(top_myrorss_dir_name, radar_field_name, radar_height_m_asl, spc_date_string, tornado_table, tornado_row, output_file_name):
    """Plots one unlinked tornado with radar field.

    :param top_myrorss_dir_name: See documentation at top of file.
    :param radar_field_name: Same.
    :param radar_height_m_asl: Same.
    :param spc_date_string: SPC date for linkage file (format "yyyymmdd").
    :param tornado_table: pandas DataFrame created by
        `linkage._read_input_tornado_reports`.
    :param tornado_row: Will plot only tornado in [j]th row of table, where
        j = `tornado_row`.
    :param output_file_name: Path to output file.  Figure will be saved here.
    """

    tornado_time_unix_sec = tornado_table[
        linkage.EVENT_TIME_COLUMN].values[tornado_row]

    # Snap the tornado time to the nearest radar scan time.
    radar_time_unix_sec = number_rounding.round_to_nearest(
        tornado_time_unix_sec, RADAR_TIME_INTERVAL_SEC)
    radar_spc_date_string = time_conversion.time_to_spc_date_string(
        radar_time_unix_sec)

    # Missing files are tolerated (raise_error_if_missing=False) only when the
    # radar time falls on a different SPC date than the linkage file.
    radar_file_name = myrorss_and_mrms_io.find_raw_file(
        top_directory_name=top_myrorss_dir_name,
        spc_date_string=radar_spc_date_string,
        unix_time_sec=radar_time_unix_sec,
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        field_name=radar_field_name,
        height_m_asl=radar_height_m_asl,
        raise_error_if_missing=spc_date_string == radar_spc_date_string)

    if not os.path.isfile(radar_file_name):
        # Fall back to the nearest radar time *within* the linkage file's SPC
        # date: clamp the radar time into [first scan, last scan] of that
        # date, then look up the file again (this time raising if missing).
        first_radar_time_unix_sec = number_rounding.ceiling_to_nearest(
            time_conversion.get_start_of_spc_date(spc_date_string),
            RADAR_TIME_INTERVAL_SEC)
        last_radar_time_unix_sec = number_rounding.floor_to_nearest(
            time_conversion.get_end_of_spc_date(spc_date_string),
            RADAR_TIME_INTERVAL_SEC)

        radar_time_unix_sec = max(
            [radar_time_unix_sec, first_radar_time_unix_sec])
        radar_time_unix_sec = min(
            [radar_time_unix_sec, last_radar_time_unix_sec])

        radar_file_name = myrorss_and_mrms_io.find_raw_file(
            top_directory_name=top_myrorss_dir_name,
            spc_date_string=spc_date_string,
            unix_time_sec=radar_time_unix_sec,
            data_source=radar_utils.MYRORSS_SOURCE_ID,
            field_name=radar_field_name,
            height_m_asl=radar_height_m_asl,
            raise_error_if_missing=True)

    # Read the sparse radar grid and expand it to a full lat-long grid.
    radar_metadata_dict = myrorss_and_mrms_io.read_metadata_from_raw_file(
        netcdf_file_name=radar_file_name,
        data_source=radar_utils.MYRORSS_SOURCE_ID)

    sparse_grid_table = (myrorss_and_mrms_io.read_data_from_sparse_grid_file(
        netcdf_file_name=radar_file_name,
        field_name_orig=radar_metadata_dict[
            myrorss_and_mrms_io.FIELD_NAME_COLUMN_ORIG],
        data_source=radar_utils.MYRORSS_SOURCE_ID,
        sentinel_values=radar_metadata_dict[radar_utils.SENTINEL_VALUE_COLUMN])
    )

    radar_matrix, grid_point_latitudes_deg, grid_point_longitudes_deg = (
        radar_s2f.sparse_to_full_grid(sparse_grid_table=sparse_grid_table,
                                      metadata_dict=radar_metadata_dict))

    # Flip rows so latitudes increase with row index (matching the map axes).
    radar_matrix = numpy.flip(radar_matrix, axis=0)
    grid_point_latitudes_deg = grid_point_latitudes_deg[::-1]

    # `[1:]` drops the first element of the returned tuple, keeping only the
    # axes and basemap objects.
    axes_object, basemap_object = (
        plotting_utils.create_equidist_cylindrical_map(
            min_latitude_deg=numpy.min(grid_point_latitudes_deg),
            max_latitude_deg=numpy.max(grid_point_latitudes_deg),
            min_longitude_deg=numpy.min(grid_point_longitudes_deg),
            max_longitude_deg=numpy.max(grid_point_longitudes_deg),
            resolution_string='i')[1:])

    # Draw geographic and graticule background layers.
    plotting_utils.plot_coastlines(basemap_object=basemap_object,
                                   axes_object=axes_object,
                                   line_colour=BORDER_COLOUR)
    plotting_utils.plot_countries(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  line_colour=BORDER_COLOUR)
    plotting_utils.plot_states_and_provinces(basemap_object=basemap_object,
                                             axes_object=axes_object,
                                             line_colour=BORDER_COLOUR)
    plotting_utils.plot_parallels(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_parallels=NUM_PARALLELS)
    plotting_utils.plot_meridians(basemap_object=basemap_object,
                                  axes_object=axes_object,
                                  num_meridians=NUM_MERIDIANS)

    # Grid spacing is inferred from the first pair of coordinates in each
    # direction.
    radar_plotting.plot_latlng_grid(
        field_matrix=radar_matrix, field_name=radar_field_name,
        axes_object=axes_object,
        min_grid_point_latitude_deg=numpy.min(grid_point_latitudes_deg),
        min_grid_point_longitude_deg=numpy.min(grid_point_longitudes_deg),
        latitude_spacing_deg=numpy.diff(grid_point_latitudes_deg[:2])[0],
        longitude_spacing_deg=numpy.diff(grid_point_longitudes_deg[:2])[0])

    # Overlay the tornado location as a single marker.
    tornado_latitude_deg = tornado_table[
        linkage.EVENT_LATITUDE_COLUMN].values[tornado_row]
    tornado_longitude_deg = tornado_table[
        linkage.EVENT_LONGITUDE_COLUMN].values[tornado_row]

    axes_object.plot(
        tornado_longitude_deg, tornado_latitude_deg, linestyle='None',
        marker=TORNADO_MARKER_TYPE, markersize=TORNADO_MARKER_SIZE,
        markeredgewidth=TORNADO_MARKER_EDGE_WIDTH,
        markerfacecolor=plotting_utils.colour_from_numpy_to_tuple(
            TORNADO_MARKER_COLOUR),
        markeredgecolor=plotting_utils.colour_from_numpy_to_tuple(
            TORNADO_MARKER_COLOUR))

    tornado_time_string = time_conversion.unix_sec_to_string(
        tornado_time_unix_sec, TIME_FORMAT)

    title_string = (
        'Unlinked tornado at {0:s}, {1:.2f} deg N, {2:.2f} deg E').format(
            tornado_time_string, tornado_latitude_deg, tornado_longitude_deg)
    pyplot.title(title_string, fontsize=TITLE_FONT_SIZE)

    # Save the figure and trim surrounding whitespace in place.
    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    pyplot.savefig(output_file_name, dpi=FIGURE_RESOLUTION_DPI)
    pyplot.close()

    imagemagick_utils.trim_whitespace(input_file_name=output_file_name,
                                      output_file_name=output_file_name)
central_longitude_deg=central_longitude_deg) # Project lat-long grid points to x-y. (grid_point_x_matrix_metres, grid_point_y_matrix_metres) = projections.project_latlng_to_xy( latitudes_deg=lats, longitudes_deg=lons, projection_object=projection_object) x_min_metres = numpy.min(grid_point_x_matrix_metres) x_max_metres = numpy.max(grid_point_x_matrix_metres) y_min_metres = numpy.min(grid_point_y_matrix_metres) y_max_metres = numpy.max(grid_point_y_matrix_metres) # Round corners to nearest 10 km. These will become the corners of the actual # x-y grid. x_min_metres = number_rounding.floor_to_nearest(x_min_metres, X_SPACING_METRES) x_max_metres = number_rounding.ceiling_to_nearest(x_max_metres, X_SPACING_METRES) y_min_metres = number_rounding.floor_to_nearest(y_min_metres, Y_SPACING_METRES) y_max_metres = number_rounding.ceiling_to_nearest(y_max_metres, Y_SPACING_METRES) num_grid_rows = 1 + int(numpy.round((y_max_metres - y_min_metres) / Y_SPACING_METRES)) num_grid_columns = 1 + int(numpy.round((x_max_metres - x_min_metres) / X_SPACING_METRES)) (unique_grid_point_x_metres, unique_grid_point_y_metres) = grids.get_xy_grid_points( x_min_metres=x_min_metres, y_min_metres=y_min_metres, x_spacing_metres=X_SPACING_METRES, y_spacing_metres=Y_SPACING_METRES, num_rows=num_grid_rows, num_columns=num_grid_columns) (grid_point_x_matrix_metres, grid_point_y_matrix_metres) = grids.xy_vectors_to_matrices( x_unique_metres=unique_grid_point_x_metres,
def create_equidistant_grid(min_latitude_deg, max_latitude_deg,
                            min_longitude_deg, max_longitude_deg,
                            x_spacing_metres, y_spacing_metres, azimuthal=True):
    """Creates equidistant x-y grid covering the given lat-long box.

    M = number of rows
    N = number of columns

    :param min_latitude_deg: Minimum latitude (deg N) in grid.
    :param max_latitude_deg: Max latitude (deg N) in grid.
    :param min_longitude_deg: Minimum longitude (deg E) in grid.
    :param max_longitude_deg: Max longitude (deg E) in grid.
    :param x_spacing_metres: Spacing between grid points in adjacent columns.
    :param y_spacing_metres: Spacing between grid points in adjacent rows.
    :param azimuthal: Boolean flag.  If True, grid is built on an azimuthal-
        equidistant projection; if False, on a Lambert conformal projection.
    :return: grid_dict: Dictionary with the following keys.
    grid_dict['grid_point_x_coords_metres']: length-N numpy array with unique
        x-coordinates at grid points.
    grid_dict['grid_point_y_coords_metres']: length-M numpy array with unique
        y-coordinates at grid points.
    grid_dict['projection_object']: Instance of `pyproj.Proj` (used to convert
        between lat-long coordinates and the x-y coordinates of the grid).
    """

    # Validate inputs.  Order of checks is preserved so that the same error
    # surfaces first for multiply-invalid input.
    error_checking.assert_is_valid_latitude(min_latitude_deg)
    error_checking.assert_is_valid_latitude(max_latitude_deg)
    error_checking.assert_is_greater(max_latitude_deg, min_latitude_deg)
    error_checking.assert_is_greater(x_spacing_metres, 0.)
    error_checking.assert_is_greater(y_spacing_metres, 0.)
    error_checking.assert_is_boolean(azimuthal)

    min_longitude_deg = lng_conversion.convert_lng_negative_in_west(
        min_longitude_deg, allow_nan=False)
    max_longitude_deg = lng_conversion.convert_lng_negative_in_west(
        max_longitude_deg, allow_nan=False)
    error_checking.assert_is_greater(max_longitude_deg, min_longitude_deg)

    # Build a dummy lat-long grid, used only to sample the projection over
    # the domain.
    dummy_num_rows = 1 + int(numpy.round(
        (max_latitude_deg - min_latitude_deg) / DUMMY_LATITUDE_SPACING_DEG
    ))
    dummy_num_columns = 1 + int(numpy.round(
        (max_longitude_deg - min_longitude_deg) / DUMMY_LONGITUDE_SPACING_DEG
    ))

    these_latitudes_deg, these_longitudes_deg = get_latlng_grid_points(
        min_latitude_deg=min_latitude_deg,
        min_longitude_deg=min_longitude_deg,
        lat_spacing_deg=DUMMY_LATITUDE_SPACING_DEG,
        lng_spacing_deg=DUMMY_LONGITUDE_SPACING_DEG,
        num_rows=dummy_num_rows, num_columns=dummy_num_columns)

    latitude_matrix_deg, longitude_matrix_deg = latlng_vectors_to_matrices(
        unique_latitudes_deg=these_latitudes_deg,
        unique_longitudes_deg=these_longitudes_deg)

    # Initialize projection centred on the lat-long box.
    central_latitude_deg = 0.5 * (min_latitude_deg + max_latitude_deg)
    central_longitude_deg = 0.5 * (min_longitude_deg + max_longitude_deg)

    if azimuthal:
        projection_object = projections.init_azimuthal_equidistant_projection(
            central_latitude_deg=central_latitude_deg,
            central_longitude_deg=central_longitude_deg)
    else:
        projection_object = projections.init_lcc_projection(
            standard_latitudes_deg=numpy.full(2, central_latitude_deg),
            central_longitude_deg=central_longitude_deg)

    # Project the dummy grid to x-y space.
    prelim_x_matrix_metres, prelim_y_matrix_metres = (
        projections.project_latlng_to_xy(
            latitudes_deg=latitude_matrix_deg,
            longitudes_deg=longitude_matrix_deg,
            projection_object=projection_object)
    )

    # Snap the projected bounding box outward to the nearest multiple of the
    # grid spacing; these become the corners of the final grid.
    x_min_metres = number_rounding.floor_to_nearest(
        numpy.min(prelim_x_matrix_metres), x_spacing_metres)
    x_max_metres = number_rounding.ceiling_to_nearest(
        numpy.max(prelim_x_matrix_metres), x_spacing_metres)
    y_min_metres = number_rounding.floor_to_nearest(
        numpy.min(prelim_y_matrix_metres), y_spacing_metres)
    y_max_metres = number_rounding.ceiling_to_nearest(
        numpy.max(prelim_y_matrix_metres), y_spacing_metres)

    num_grid_rows = 1 + int(numpy.round(
        (y_max_metres - y_min_metres) / y_spacing_metres
    ))
    num_grid_columns = 1 + int(numpy.round(
        (x_max_metres - x_min_metres) / x_spacing_metres
    ))

    unique_x_coords_metres, unique_y_coords_metres = get_xy_grid_points(
        x_min_metres=x_min_metres, y_min_metres=y_min_metres,
        x_spacing_metres=x_spacing_metres, y_spacing_metres=y_spacing_metres,
        num_rows=num_grid_rows, num_columns=num_grid_columns)

    return {
        X_COORDS_KEY: unique_x_coords_metres,
        Y_COORDS_KEY: unique_y_coords_metres,
        PROJECTION_KEY: projection_object
    }
def _plot_one_composite(gradcam_file_name, monte_carlo_file_name,
                        composite_name_abbrev, composite_name_verbose,
                        colour_map_object, min_colour_value, max_colour_value,
                        num_contours, smoothing_radius_grid_cells,
                        output_dir_name):
    """Plots class-activation map for one composite.

    :param gradcam_file_name: Path to input file (will be read by
        `gradcam.read_file`).
    :param monte_carlo_file_name: Path to Monte Carlo file (will be read by
        `_read_monte_carlo_file`).
    :param composite_name_abbrev: Abbrev composite name (will be used in file
        names).
    :param composite_name_verbose: Verbose composite name (will be used in
        figure title).
    :param colour_map_object: See documentation at top of file.
    :param min_colour_value: Minimum value in colour bar (may be NaN).
    :param max_colour_value: Max value in colour bar (may be NaN).
    :param num_contours: See documentation at top of file.
    :param smoothing_radius_grid_cells: Same.
    :param output_dir_name: Name of output directory (figures will be saved
        here).
    :return: main_figure_file_name: Path to main image file created by this
        method.
    :return: min_colour_value: Same as input but cannot be None.
    :return: max_colour_value: Same as input but cannot be None.
    """

    (mean_radar_matrix, mean_class_activn_matrix, significance_matrix,
     model_metadata_dict) = _read_one_composite(
         gradcam_file_name=gradcam_file_name,
         smoothing_radius_grid_cells=smoothing_radius_grid_cells,
         monte_carlo_file_name=monte_carlo_file_name)

    # Diagnostic output: min, 1st percentile, 99th percentile, and max of the
    # composited class activations.
    print(numpy.percentile(mean_class_activn_matrix, 0.))
    print(numpy.percentile(mean_class_activn_matrix, 1.))
    print(numpy.percentile(mean_class_activn_matrix, 99.))
    print(numpy.percentile(mean_class_activn_matrix, 100.))

    if numpy.isnan(min_colour_value) or numpy.isnan(max_colour_value):
        # Colour limits not supplied: derive them from the 1st and 99th
        # percentiles in log10 space, rounded outward to the nearest 0.1.
        min_colour_value_log10 = number_rounding.floor_to_nearest(
            numpy.log10(numpy.percentile(mean_class_activn_matrix, 1.)),
            0.1)
        max_colour_value_log10 = number_rounding.ceiling_to_nearest(
            numpy.log10(numpy.percentile(mean_class_activn_matrix, 99.)),
            0.1)

        # Clamp limits: minimum to [10^-2, 10^1], maximum to [10^-1, 10^2].
        min_colour_value_log10 = max([min_colour_value_log10, -2.])
        max_colour_value_log10 = max([max_colour_value_log10, -1.])
        min_colour_value_log10 = min([min_colour_value_log10, 1.])
        max_colour_value_log10 = min([max_colour_value_log10, 2.])

        min_colour_value = 10 ** min_colour_value_log10
        max_colour_value = 10 ** max_colour_value_log10
    else:
        min_colour_value_log10 = numpy.log10(min_colour_value)
        max_colour_value_log10 = numpy.log10(max_colour_value)

    # Evenly spaced contour levels in log10 space.
    contour_interval_log10 = (
        (max_colour_value_log10 - min_colour_value_log10) / (num_contours - 1)
    )

    mean_activn_matrix_log10 = numpy.log10(mean_class_activn_matrix)

    training_option_dict = model_metadata_dict[cnn.TRAINING_OPTION_DICT_KEY]
    field_names = training_option_dict[trainval_io.RADAR_FIELDS_KEY]

    # Radar matrix is indexed [..., height, field].
    num_fields = mean_radar_matrix.shape[-1]
    num_heights = mean_radar_matrix.shape[-2]

    # Plot the underlying radar composite (one figure per field).
    handle_dict = plot_examples.plot_one_example(
        list_of_predictor_matrices=[mean_radar_matrix],
        model_metadata_dict=model_metadata_dict, pmm_flag=True,
        allow_whitespace=True, plot_panel_names=True,
        panel_name_font_size=PANEL_NAME_FONT_SIZE, add_titles=False,
        label_colour_bars=True, colour_bar_length=COLOUR_BAR_LENGTH,
        colour_bar_font_size=COLOUR_BAR_FONT_SIZE,
        num_panel_rows=num_heights)

    figure_objects = handle_dict[plot_examples.RADAR_FIGURES_KEY]
    axes_object_matrices = handle_dict[plot_examples.RADAR_AXES_KEY]

    for k in range(num_fields):
        # NOTE(review): the same class-activation grid (not indexed by k) is
        # contoured over every field's panels -- presumably the CAM is defined
        # over the spatial grid only; confirm this is intended.
        cam_plotting.plot_many_2d_grids(
            class_activation_matrix_3d=numpy.flip(
                mean_activn_matrix_log10[0, ...], axis=0),
            axes_object_matrix=axes_object_matrices[k],
            colour_map_object=colour_map_object,
            min_contour_level=min_colour_value_log10,
            max_contour_level=max_colour_value_log10,
            contour_interval=contour_interval_log10)

        # Overlay Monte Carlo significance stippling on the same panels.
        significance_plotting.plot_many_2d_grids_without_coords(
            significance_matrix=numpy.flip(significance_matrix[0, ...],
                                           axis=0),
            axes_object_matrix=axes_object_matrices[k])

    # Save one panel image per radar field.
    panel_file_names = [None] * num_fields

    for k in range(num_fields):
        panel_file_names[k] = '{0:s}/{1:s}_{2:s}.jpg'.format(
            output_dir_name, composite_name_abbrev,
            field_names[k].replace('_', '-'))

        print('Saving figure to: "{0:s}"...'.format(panel_file_names[k]))
        figure_objects[k].savefig(panel_file_names[k],
                                  dpi=FIGURE_RESOLUTION_DPI, pad_inches=0,
                                  bbox_inches='tight')
        pyplot.close(figure_objects[k])

    # Concatenate the panels into one figure, resize, trim, and title it.
    main_figure_file_name = '{0:s}/{1:s}_gradcam.jpg'.format(
        output_dir_name, composite_name_abbrev)
    print('Concatenating panels to: "{0:s}"...'.format(main_figure_file_name))

    imagemagick_utils.concatenate_images(
        input_file_names=panel_file_names,
        output_file_name=main_figure_file_name, num_panel_rows=1,
        num_panel_columns=num_fields, border_width_pixels=50)
    imagemagick_utils.resize_image(input_file_name=main_figure_file_name,
                                   output_file_name=main_figure_file_name,
                                   output_size_pixels=CONCAT_FIGURE_SIZE_PX)
    imagemagick_utils.trim_whitespace(input_file_name=main_figure_file_name,
                                      output_file_name=main_figure_file_name,
                                      border_width_pixels=TITLE_FONT_SIZE + 25)
    _overlay_text(image_file_name=main_figure_file_name,
                  x_offset_from_center_px=0, y_offset_from_top_px=0,
                  text_string=composite_name_verbose)
    imagemagick_utils.trim_whitespace(input_file_name=main_figure_file_name,
                                      output_file_name=main_figure_file_name,
                                      border_width_pixels=10)

    return main_figure_file_name, min_colour_value, max_colour_value
def read_metadata_from_raw_file(netcdf_file_name, data_source=None,
                                raise_error_if_fails=True):
    """Reads metadata from raw (either MYRORSS or MRMS) file.

    This file should contain one radar field at one height and one time step.

    :param netcdf_file_name: Path to input file.
    :param data_source: Data source (either "myrorss" or "mrms").
    :param raise_error_if_fails: Boolean flag.  If True and file cannot be
        opened, this method will raise an error.  If False and file cannot be
        opened, this method will return None.
    :return: metadata_dict: Dictionary with the following keys.
    metadata_dict['nw_grid_point_lat_deg']: Latitude (deg N) of
        northwesternmost grid point.
    metadata_dict['nw_grid_point_lng_deg']: Longitude (deg E) of
        northwesternmost grid point.
    metadata_dict['lat_spacing_deg']: Spacing (deg N) between adjacent rows.
    metadata_dict['lng_spacing_deg']: Spacing (deg E) between adjacent columns.
    metadata_dict['num_lat_in_grid']: Number of rows (unique grid-point
        latitudes).
    metadata_dict['num_lng_in_grid']: Number of columns (unique grid-point
        longitudes).
    metadata_dict['height_m_agl']: Height (metres above ground level).
    metadata_dict['unix_time_sec']: Time in Unix format.
    metadata_dict['field_name']: Name of radar field in new format.
    metadata_dict['field_name_orig']: Name of radar field in original (MYRORSS
        or MRMS) format.
    metadata_dict['sentinel_values']: 1-D numpy array of sentinel values.
    """

    error_checking.assert_file_exists(netcdf_file_name)
    netcdf_dataset = netcdf_io.open_netcdf(netcdf_file_name,
                                           raise_error_if_fails)
    if netcdf_dataset is None:
        return None

    field_name_orig = str(getattr(netcdf_dataset, FIELD_NAME_COLUMN_ORIG))

    metadata_dict = {
        NW_GRID_POINT_LAT_COLUMN:
            getattr(netcdf_dataset, NW_GRID_POINT_LAT_COLUMN_ORIG),
        NW_GRID_POINT_LNG_COLUMN:
            lng_conversion.convert_lng_positive_in_west(
                getattr(netcdf_dataset, NW_GRID_POINT_LNG_COLUMN_ORIG),
                allow_nan=False),
        LAT_SPACING_COLUMN: getattr(netcdf_dataset, LAT_SPACING_COLUMN_ORIG),
        LNG_SPACING_COLUMN: getattr(netcdf_dataset, LNG_SPACING_COLUMN_ORIG),

        # NOTE(review): the raw file appears to report one fewer row/column
        # than the grid actually has, hence the "+ 1" -- confirm against the
        # raw MYRORSS/MRMS format.
        NUM_LAT_COLUMN:
            netcdf_dataset.dimensions[NUM_LAT_COLUMN_ORIG].size + 1,
        NUM_LNG_COLUMN:
            netcdf_dataset.dimensions[NUM_LNG_COLUMN_ORIG].size + 1,

        HEIGHT_COLUMN: getattr(netcdf_dataset, HEIGHT_COLUMN_ORIG),
        UNIX_TIME_COLUMN: getattr(netcdf_dataset, UNIX_TIME_COLUMN_ORIG),
        FIELD_NAME_COLUMN_ORIG: field_name_orig,
        FIELD_NAME_COLUMN: _field_name_orig_to_new(field_name_orig,
                                                   data_source=data_source)
    }

    # Snap the NW corner onto the grid implied by the row/column spacings
    # (floor the latitude, ceiling the longitude).
    metadata_dict[NW_GRID_POINT_LAT_COLUMN] = rounder.floor_to_nearest(
        metadata_dict[NW_GRID_POINT_LAT_COLUMN],
        metadata_dict[LAT_SPACING_COLUMN])
    metadata_dict[NW_GRID_POINT_LNG_COLUMN] = rounder.ceiling_to_nearest(
        metadata_dict[NW_GRID_POINT_LNG_COLUMN],
        metadata_dict[LNG_SPACING_COLUMN])

    # Build the sentinel-value array in one pass (replaces index loop over a
    # preallocated NaN array; also replaces dict.update with direct
    # assignment).
    metadata_dict[SENTINEL_VALUE_COLUMN] = numpy.array(
        [getattr(netcdf_dataset, this_column)
         for this_column in SENTINEL_VALUE_COLUMNS_ORIG],
        dtype=float)

    netcdf_dataset.close()
    return metadata_dict