Example #1
def time_adjustment(first_data_cube,
                    coefficient_cube,
                    timescale,
                    branch_time=None):
    """Determine the adjustment that needs to be made to time axis.

    Args:
        first_data_cube (iris.cube.Cube)
        coefficient_cube (iris.cube.Cube)
        timescale (str): annual or monthly
        branch_time (float): Override the branch time in the file metadata

    For CMIP5 monthly data, the branch time represents the start of the month
      (e.g. 1 Jan), while the first data time is mid-month, so an offset of
      15.5 days is applied. When iris is used to calculate annual data from
      these monthly files, the first value is shifted from 15.5 to 182.5 days.
    
    """

    if timescale == 'monthly':
        adjustment = 15.5
    elif timescale == 'annual':
        adjustment = 182.5
    else:
        raise ValueError("timescale must be 'monthly' or 'annual'")

    if branch_time is not None:
        branch_time_value = float(branch_time) + adjustment
    else:
        branch_time_value = float(
            first_data_cube.attributes['branch_time']) + adjustment
    branch_time_unit = coefficient_cube.attributes['time_unit']
    assert 'days' in branch_time_unit
    branch_time_calendar = coefficient_cube.attributes['time_calendar']
    data_time_coord = first_data_cube.coord('time')

    new_unit = cf_units.Unit(branch_time_unit, calendar=branch_time_calendar)
    data_time_coord.convert_units(new_unit)

    first_experiment_time = data_time_coord.points[0]
    time_diff = first_experiment_time - branch_time_value

    return time_diff, branch_time_value, new_unit
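A minimal sketch (not part of the original source) of the calendar-aware conversion this function relies on; the unit strings and the 'noleap' calendar are illustrative:

import cf_units

# The 15.5-day mid-month offset above, expressed in hours.
days = cf_units.Unit('days since 0001-01-01', calendar='noleap')
hours = cf_units.Unit('hours since 0001-01-01', calendar='noleap')
print(days.convert(15.5, hours))  # 372.0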
Example #2
def _missing_forecast_period(cube):
    """
    Returns a reference time and significance code together with a forecast
    period and corresponding units type code.

    """
    t_coord = cube.coord("time")

    if cube.coords('forecast_reference_time'):
        # Make copies and convert them to common "hours since" units.
        hours_since = cf_units.Unit('hours since epoch',
                                    calendar=t_coord.units.calendar)
        frt_coord = cube.coord('forecast_reference_time').copy()
        frt_coord.convert_units(hours_since)
        t_coord = t_coord.copy()
        t_coord.convert_units(hours_since)
        # Extract values.
        t = t_coord.bounds[0, 0] if t_coord.has_bounds() else t_coord.points[0]
        frt = frt_coord.points[0]
        # Calculate GRIB parameters.
        rt = frt_coord.units.num2date(frt)
        rt_meaning = 1  # Forecast reference time.
        fp = t - frt
        integer_fp = int(fp)
        if integer_fp != fp:
            msg = 'Truncating floating point forecast period {} to ' \
                  'integer value {}'
            warnings.warn(msg.format(fp, integer_fp))
        fp = integer_fp
        fp_meaning = 1  # Hours
    else:
        # With no forecast period or forecast reference time set assume a
        # reference time significance of "Observation time" and set the
        # forecast period to 0h.
        t = t_coord.bounds[0, 0] if t_coord.has_bounds() else t_coord.points[0]
        rt = t_coord.units.num2date(t)
        rt_meaning = 3  # Observation time
        fp = 0
        fp_meaning = 1  # Hours

    return rt, rt_meaning, fp, fp_meaning
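A small standalone sketch (mine, not from the source) of the forecast-period arithmetic above, reusing the same 'hours since epoch' trick:

import cf_units
from datetime import datetime

hours_since = cf_units.Unit('hours since epoch', calendar='standard')
frt = hours_since.date2num(datetime(2020, 1, 1, 0))  # reference time
t = hours_since.date2num(datetime(2020, 1, 1, 6))    # validity time
print(t - frt)  # 6.0 -> the forecast period, already in hours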
Example #3
def get_bounds_of_distribution(bounds_pairing_key, desired_units):
    """
    Gets the bounds of the distribution and converts the units of the
    bounds_pairing to the desired_units.

    This method gets the bounds values and units from the imported
    dictionaries: BOUNDS_FOR_ECDF and units_of_BOUNDS_FOR_ECDF.
    The units of the bounds are converted to be the desired units.

    Args:
        bounds_pairing_key (str):
            Name of key to be used for the BOUNDS_FOR_ECDF dictionary, in order
            to get the desired bounds_pairing.
        desired_units (cf_units.Unit):
            Units to which the bounds_pairing will be converted.

    Returns:
        bounds_pairing (tuple):
            Lower and upper bound to be used as the ends of the
            empirical cumulative distribution function, converted to have
            the desired units.

    Raises:
        KeyError: If the bounds_pairing_key is not within the BOUNDS_FOR_ECDF
            dictionary.

    """
    # Extract bounds from dictionary of constants.
    try:
        bounds_pairing = BOUNDS_FOR_ECDF[bounds_pairing_key].value
        bounds_pairing_units = BOUNDS_FOR_ECDF[bounds_pairing_key].units
    except KeyError as err:
        msg = ("The bounds_pairing_key: {} is not recognised "
               "within BOUNDS_FOR_ECDF {}. \n"
               "Error: {}".format(
                   bounds_pairing_key, BOUNDS_FOR_ECDF, err))
        raise KeyError(msg)
    bounds_pairing_units = unit.Unit(bounds_pairing_units)
    bounds_pairing = bounds_pairing_units.convert(
        np.array(bounds_pairing), desired_units)
    return bounds_pairing
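The array conversion at the end of this function can be exercised on its own; a self-contained sketch with illustrative values and units:

import numpy as np
import cf_units

bounds_pairing = np.array([-40.0, 50.0])
source_units = cf_units.Unit('celsius')
print(source_units.convert(bounds_pairing, cf_units.Unit('K')))
# -> [233.15 323.15]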
Example #4
 def _check_coord(self, cmor, coord, var_name):
     """Check single coordinate."""
     if coord.var_name == 'time':
         return
     if cmor.units:
         if str(coord.units) != cmor.units:
             fixed = False
             if self.automatic_fixes:
                 try:
                     new_unit = cf_units.Unit(cmor.units,
                                              coord.units.calendar)
                     coord.convert_units(new_unit)
                     fixed = True
                 except ValueError:
                     pass
             if not fixed:
                 self.report_error(self._attr_msg, var_name, 'units',
                                   cmor.units, coord.units)
     self._check_coord_values(cmor, coord, var_name)
     if not self.automatic_fixes:
         self._check_coord_monotonicity_and_direction(cmor, coord, var_name)
Example #5
    def _make_cf_grib2_entry(standard_name, long_name, param_discipline,
                             param_category, param_number, units):
        """
        Check data, convert types and make a new _CF_TABLE key/value pair.

        """
        assert standard_name is not None or long_name is not None
        if standard_name is not None:
            long_name = None
            if standard_name not in iris.std_names.STD_NAMES:
                warnings.warn('{} is not a recognised CF standard name '
                              '(skipping).'.format(standard_name))
                return None
        cf_key = _CfToGrib2KeyClass(standard_name, long_name)
        # convert units string to iris Unit (i.e. mainly, check it is good)
        a_cf_unit = cf_units.Unit(units)
        grib2_data = _CfToGrib2DataClass(discipline=int(param_discipline),
                                         category=int(param_category),
                                         number=int(param_number),
                                         units=a_cf_unit)
        return (cf_key, grib2_data)
Example #6
def _title(cube_or_coord, with_units):
    if cube_or_coord is None or isinstance(cube_or_coord, int):
        title = ""
    else:
        title = cube_or_coord.name().replace("_", " ").capitalize()
        units = cube_or_coord.units
        if with_units and not (units.is_unknown() or units.is_no_unit()
                               or units == cf_units.Unit("1")):

            if _use_symbol(units):
                units = units.symbol
            elif units.is_time_reference():
                # iris.plot uses matplotlib.dates.date2num, whose fixed
                # epoch depends on the matplotlib version (handled below).
                if version.parse(_mpl_version) >= version.parse("3.3"):
                    days_since = "1970-01-01"
                else:
                    days_since = "0001-01-01"
                units = "days since {}".format(days_since)
            title += " / {}".format(units)

    return title
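The unit predicates used above are easy to probe standalone; a quick sketch (behaviour assumed for a recent cf_units release):

import cf_units

print(cf_units.Unit('kg m-2 s-1').symbol)                          # shortest symbolic form
print(cf_units.Unit('days since 1970-01-01').is_time_reference())  # True
print(cf_units.Unit('unknown').is_unknown())                       # True
print(cf_units.Unit('no_unit').is_no_unit())                       # True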
Example #7
    def setUp(self):
        # make a series of 'day numbers' for the time, that slide across month
        # boundaries
        day_numbers = np.arange(0, 600, 27, dtype=np.int32)

        cube = iris.cube.Cube(day_numbers,
                              long_name='test cube',
                              units='metres')

        # use day numbers as data values also (don't actually use this for
        # anything)
        cube.data = day_numbers

        time_coord = iris.coords.DimCoord(day_numbers,
                                          standard_name='time',
                                          units=cf_units.Unit(
                                              'days since epoch', 'gregorian'))
        cube.add_dim_coord(time_coord, 0)

        self.cube = cube
        self.time_coord = time_coord
Example #8
def getTimeCoord(file):
    timeunit = cf_units.Unit('hours since 1970-01-01',
                             calendar=cf_units.CALENDAR_GREGORIAN)

    dtstrings = os.path.basename(file).split('.')[4]

    start_dt = dt.datetime.strptime(
        dtstrings.split('-')[0] + '-' + dtstrings.split('-')[1],
        '%Y%m%d-S%H%M%S')
    end_dt = dt.datetime.strptime(
        dtstrings.split('-')[0] + '-' + dtstrings.split('-')[2],
        '%Y%m%d-E%H%M%S')
    id_pt = start_dt + ((end_dt - start_dt) / 2)

    timecoord = iris.coords.DimCoord([timeunit.date2num(id_pt)],
                                     bounds=[(timeunit.date2num(start_dt),
                                              timeunit.date2num(end_dt))],
                                     standard_name='time',
                                     units=timeunit)

    return timecoord
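The same midpoint/bounds construction, trimmed of the filename parsing (a sketch; 'standard' is the modern spelling of the gregorian calendar constant used above):

import datetime as dt
import cf_units
import iris.coords

timeunit = cf_units.Unit('hours since 1970-01-01', calendar='standard')
start, end = dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 2)
mid = start + (end - start) / 2
timecoord = iris.coords.DimCoord([timeunit.date2num(mid)],
                                 bounds=[(timeunit.date2num(start),
                                          timeunit.date2num(end))],
                                 standard_name='time',
                                 units=timeunit)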
Example #9
def run(collect_to: Path, tar_file_path: Path) -> None:
    tar_files = sorted(list((tar_file_path).rglob("*.tar")))
    logger.info(f"Start counting frames")
    n_frames = get_number_of_frames(tar_files)
    logger.info(f"Collecting {n_frames} radar frames")
    offset = "minutes since 1970-01-01 00:00:00"
    time_unit = cf_units.Unit(offset, calendar=cf_units.CALENDAR_STANDARD)
    x_size = 900
    y_size = 900

    with h5netcdf.File(collect_to, "w") as f:
        # Dimensions.
        f.dimensions["time"] = n_frames
        f.dimensions["x"] = x_size
        f.dimensions["y"] = y_size

        # Coordinate variables.
        time_var = f.create_variable("time", dimensions=("time", ), dtype=int)
        time_var.attrs["units"] = time_unit.name

        f.create_variable("x", dimensions=("x", ), data=np.arange(x_size))
        f.create_variable("y", dimensions=("y", ), data=np.arange(y_size))

        # Data variables.
        rain_var = f.create_variable(
            "rain",
            dimensions=("time", "y", "x"),
            dtype=int,
            chunks=True,
            compression="lzf",
        )
        rain_var.attrs["units"] = "mm/h"

        rain_var.attrs["_FillValue"] = -1
        start = 0
        end = 0
        for rain, time in collect_year(tar_files):
            end += len(time)
            write_to_netcdf(f, rain, time_unit.date2num(time), start, end)
            start = end
Example #10
def spot_timezone_fixture(spot_template):
    """Spot data on local time-zones
    (no forecast_period, forecast_reference_time matches spatial dimension)"""
    cube = spot_template.copy()
    cube.attributes = {
        "source": "Met Office Unified Model",
        "institution": "Met Office",
        "title": "Post-Processed MOGREPS-G Model Forecast Global Spot Values",
        "mosg__model_configuration": "gl_ens",
    }
    (time_source_coord,
     _), (frt_coord,
          _), (_,
               _) = construct_scalar_time_coords(time=datetime(2021, 2, 3, 14),
                                                 time_bounds=None,
                                                 frt=datetime(2021, 2, 3, 10))
    cube.add_aux_coord(frt_coord)
    (spatial_index, ) = cube.coord_dims("latitude")
    time_coord = iris.coords.AuxCoord(
        np.full(cube.shape, fill_value=time_source_coord.points),
        standard_name=time_source_coord.standard_name,
        units=time_source_coord.units,
    )
    cube.add_aux_coord(time_coord, spatial_index)
    local_time_coord_standards = TIME_COORDS["time_in_local_timezone"]
    local_time_units = cf_units.Unit(
        local_time_coord_standards.units,
        calendar=local_time_coord_standards.calendar,
    )
    timezone_points = np.array(
        np.round(local_time_units.date2num(datetime(2021, 2, 3, 15))),
        dtype=local_time_coord_standards.dtype,
    )
    cube.add_aux_coord(
        iris.coords.AuxCoord(
            timezone_points,
            long_name="time_in_local_timezone",
            units=local_time_units,
        ))
    return cube
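A reduced sketch of the local-timezone coordinate built above; the seconds-since-epoch unit and int64 dtype mirror what IMPROVER's TIME_COORDS typically specifies, but are assumptions here:

import numpy as np
import cf_units
from datetime import datetime

local_time_units = cf_units.Unit('seconds since 1970-01-01 00:00:00',
                                 calendar='standard')
timezone_points = np.array(
    np.round(local_time_units.date2num(datetime(2021, 2, 3, 15))),
    dtype=np.int64)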
Example #11
def calculate_psi(cube, cfg):
    """Calculate temperature variability metric psi for a given cube."""
    window_length = cfg.get('window_length', 55)
    lag = cfg.get('lag', 1)
    psi_years = []
    psis = []

    # Moving average
    for yr_idx in range(cube.shape[0] - window_length):
        slc = slice(yr_idx, yr_idx + window_length)
        years = cube.coord('year').points[slc]
        tas = np.copy(cube.data[slc])

        # De-trend data
        reg = stats.linregress(years, tas)
        tas -= reg.slope * years + reg.intercept

        # Autocorrelation
        norm = np.sum(np.square(tas))
        [autocorr] = np.correlate(tas[:-lag], tas[lag:], mode='valid') / norm

        # Psi
        psi_years.append(years[-1])
        psis.append(np.std(tas) / np.sqrt(-np.log(autocorr)))

    # Return new cube
    year_coord = iris.coords.DimCoord(np.array(psi_years),
                                      var_name='year',
                                      long_name='year',
                                      units=cf_units.Unit('year'))
    psi_cube = iris.cube.Cube(
        np.array(psis),
        dim_coords_and_dims=[(year_coord, 0)],
        attributes={
            'window_length': window_length,
            'lag': lag,
            **cfg.get('output_attributes', {}),
        },
    )
    return psi_cube
Example #12
 def test_time_360(self):
     cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts")
     time_unit = cf_units.Unit("days since 2000-01-01 00:00",
                               calendar=cf_units.CALENDAR_360_DAY)
     time_coord = DimCoord([0, 100.1, 200.2, 300.3, 400.4],
                           long_name="time", units=time_unit)
     cube.add_dim_coord(time_coord, 0)
     if netCDF4.__version__ > '1.2.4':
         expected_index = [netcdftime.Datetime360Day(2000, 1, 1, 0, 0),
                           netcdftime.Datetime360Day(2000, 4, 11, 2, 24),
                           netcdftime.Datetime360Day(2000, 7, 21, 4, 48),
                           netcdftime.Datetime360Day(2000, 11, 1, 7, 12),
                           netcdftime.Datetime360Day(2001, 2, 11, 9, 36)]
     else:
         expected_index = [netcdftime.datetime(2000, 1, 1, 0, 0),
                           netcdftime.datetime(2000, 4, 11, 2, 24),
                           netcdftime.datetime(2000, 7, 21, 4, 48),
                           netcdftime.datetime(2000, 11, 1, 7, 12),
                           netcdftime.datetime(2001, 2, 11, 9, 36)]
     series = iris.pandas.as_series(cube)
     self.assertArrayEqual(series, cube.data)
     self.assertArrayEqual(series.index, expected_index)
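On a current cf_units, num2date reproduces those 360-day calendar index values directly; a sketch (newer versions return cftime rather than netcdftime objects):

import cf_units

time_unit = cf_units.Unit("days since 2000-01-01 00:00",
                          calendar="360_day")
print(time_unit.num2date(100.1))  # 2000-04-11 02:24:00 (360-day calendar)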
Example #13
    def create_cube(self):
        data = np.arange(4).reshape(2, 2)

        lat = iris.coords.DimCoord([0, 30],
                                   standard_name="latitude",
                                   units="degrees")
        volume = iris.coords.CellMeasure([0, 15],
                                         measure="volume",
                                         long_name="volume")
        area = iris.coords.CellMeasure([1.5],
                                       standard_name="height",
                                       units="m")
        t_unit = cf_units.Unit("hours since 1970-01-01 00:00:00",
                               calendar="gregorian")
        time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit)

        cube = iris.cube.Cube(data, standard_name="air_temperature", units="K")
        cube.add_dim_coord(time, 0)
        cube.add_dim_coord(lat, 1)
        cube.add_cell_measure(volume, 1)
        cube.add_cell_measure(area)
        return cube
Example #14
    def _get_interpolation_inputs_from_dict(
            self,
            cube: Cube) -> Tuple[ndarray, ndarray, ndarray, Tuple[int, int]]:
        """
        Generate inputs required for linear interpolation.

        Args:
            cube:
                Cube containing the coordinate information that will be used
                for setting up the interpolation inputs.

        Returns:
            - Points within the configuration dictionary that will
              be used as the input to the interpolation.
            - Points within the cube that will be the target points
              for the interpolation.
            - Weights from the configuration dictionary that will be
              used as the input to the interpolation.
            - Values that will be used if extrapolation is required. The
              fill values will be used as constants that are extrapolated
              if the target_points are outside the source_points
              provided. These are equal to the first and last values
              provided by the source weights.
        """
        (config_point, ) = cube.coord(self.config_coord_name).points
        source_points = self.config_dict[config_point][
            self.weighting_coord_name]
        source_points = np.array(source_points)
        if "units" in self.config_dict[config_point].keys():
            units = cf_units.Unit(self.config_dict[config_point]["units"])
            source_points = units.convert(
                source_points,
                cube.coord(self.weighting_coord_name).units)

        target_points = cube.coord(self.weighting_coord_name).points
        source_weights = self.config_dict[config_point][self.weights_key_name]

        fill_value = (source_weights[0], source_weights[-1])
        return source_points, target_points, source_weights, fill_value
Example #15
 def check_grib1_cf(param,
                    standard_name,
                    long_name,
                    units,
                    height=None,
                    t2version=128,
                    centre=98,
                    expect_none=False):
     a_cf_unit = cf_units.Unit(units)
     cfdata = gptx.grib1_phenom_to_cf_info(param_number=param,
                                           table2_version=t2version,
                                           centre_number=centre)
     if expect_none:
         self.assertIsNone(cfdata)
     else:
         self.assertEqual(cfdata.standard_name, standard_name)
         self.assertEqual(cfdata.long_name, long_name)
         self.assertEqual(cfdata.units, a_cf_unit)
         if height is None:
             self.assertIsNone(cfdata.set_height)
         else:
             self.assertEqual(cfdata.set_height, float(height))
Example #16
    def setUp(self):
        """Create cubes containing a regular grid."""
        grid_size = 5
        data = np.zeros((1, grid_size, grid_size), dtype=np.float32)
        height = AuxCoord(np.array([1.5], dtype=np.float32),
                          standard_name='height', units='m')
        self.temperature = set_up_variable_cube(
            data, spatial_grid='equalarea', include_scalar_coords=[height],
            standard_grid_metadata='uk_det')

        # Copies temperature cube to create orography cube.
        self.orography = set_up_variable_cube(
            data[0].copy(), name='surface_altitude', units='m',
            spatial_grid='equalarea')
        for coord in ["time", "forecast_period", "forecast_reference_time"]:
            self.orography.remove_coord(coord)

        # Copies orography cube to create land/sea mask cube.
        self.land_sea_mask = self.orography.copy(
            data=np.ones((grid_size, grid_size), dtype=np.float32))
        self.land_sea_mask.rename('land_binary_mask')
        self.land_sea_mask.units = cf_units.Unit('1')
Example #17
def _label(cube, mode, result=None, ndims=2, coords=None):
    """Puts labels on the current plot using the given cube."""

    plt.title(_title(cube, with_units=False))

    if result is not None:
        draw_edges = mode == iris.coords.POINT_MODE
        bar = plt.colorbar(result,
                           orientation='horizontal',
                           drawedges=draw_edges)
        has_known_units = not (cube.units.is_unknown()
                               or cube.units.is_no_unit())
        if has_known_units and cube.units != cf_units.Unit('1'):
            # Use shortest unit representation for anything other than time
            if _use_symbol(cube.units):
                bar.set_label(cube.units.symbol)
            else:
                bar.set_label(cube.units)
        # Remove the tick which is put on the colorbar by default.
        bar.ax.tick_params(length=0)

    if coords is None:
        plot_defn = iplt._get_plot_defn(cube, mode, ndims)
    else:
        plot_defn = iplt._get_plot_defn_custom_coords_picked(cube,
                                                             coords,
                                                             mode,
                                                             ndims=ndims)

    if ndims == 2:
        if not iplt._can_draw_map(plot_defn.coords):
            plt.ylabel(_title(plot_defn.coords[0], with_units=True))
            plt.xlabel(_title(plot_defn.coords[1], with_units=True))
    elif ndims == 1:
        plt.xlabel(_title(plot_defn.coords[0], with_units=True))
        plt.ylabel(_title(cube, with_units=True))
    else:
        msg = 'Unexpected number of dimensions (%s) given to _label.' % ndims
        raise ValueError(msg)
Example #18
    def regrid_time(self, cube, start_y, end_y):
        '''Replace the cube's time coordinate with one point per year
        from start_y to end_y, in days since 1850-01-01.'''
        start = datetime.datetime(start_y, 1, 1)
        dt_array = np.array([
            start + relativedelta.relativedelta(years=i)
            for i in range((end_y - start_y) + 1)
        ])

        new_t_unit_str = 'days since 1850-01-01 00:00:00'
        new_t_unit = cf_units.Unit(new_t_unit_str,
                                   calendar=cf_units.CALENDAR_STANDARD)

        new_dt_points = [new_t_unit.date2num(new_dt) for new_dt in dt_array]
        new_t_coord = iris.coords.DimCoord(new_dt_points,
                                           standard_name='time',
                                           units=new_t_unit)

        t_coord_dim = cube.coord_dims('time')
        cube.remove_coord('time')
        cube.add_dim_coord(new_t_coord, t_coord_dim)
        return cube
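A standalone sketch of the date2num round trip performed above, one point per year in days since 1850:

import datetime
import cf_units

new_t_unit = cf_units.Unit('days since 1850-01-01 00:00:00',
                           calendar='standard')
points = [new_t_unit.date2num(datetime.datetime(1850 + i, 1, 1))
          for i in range(3)]
print(points)  # [0.0, 365.0, 730.0]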
Example #19
def write_data(cfg, tcr, external_file=None):
    """Write netcdf files."""
    var_attr = {
        'short_name': 'tcr',
        'long_name': 'Transient Climate Response (TCR)',
        'units': cf_units.Unit('K'),
    }
    path = get_diagnostic_filename(var_attr['short_name'], cfg)
    project = list(cfg['input_data'].values())[0]['project']
    io.save_scalar_data(tcr, path, var_attr, attributes={'project': project})
    caption = "{long_name} for multiple climate models.".format(**var_attr)
    provenance_record = get_provenance_record(caption)
    ancestor_files = []
    for dataset_name in tcr.keys():
        datasets = select_metadata(cfg['input_data'].values(),
                                   dataset=dataset_name)
        ancestor_files.extend([d['filename'] for d in datasets])
    if external_file is not None:
        ancestor_files.append(external_file)
    provenance_record['ancestors'] = ancestor_files
    with ProvenanceLogger(cfg) as provenance_logger:
        provenance_logger.log(path, provenance_record)
Example #20
def test_invalid_units(_, unit_str):
    # Confirm that invalid udunits-2 units are also invalid in our grammar.

    try:
        cf_units.Unit(unit_str)
        cf_valid = True
    except ValueError:
        cf_valid = False

    # Double check that udunits2 can't parse this.
    assert cf_valid is False, \
        'Unit {!r} is unexpectedly valid in UDUNITS2'.format(unit_str)

    try:
        normalize(unit_str)
        can_parse = True
    except SyntaxError:
        can_parse = False

    # Now confirm that we couldn't parse this either.
    msg = 'Parser unexpectedly able to deal with {}'.format(unit_str)
    assert can_parse is False, msg
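The ValueError contract this test depends on is easy to see directly; a sketch (the invalid string is just an example):

import cf_units

try:
    cf_units.Unit('certainly not a unit')
    print('parsed')
except ValueError:
    print('rejected by udunits2')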
Example #21
def get_bounds_of_distribution(bounds_pairing_key, desired_units):
    """
    Gets the bounds of the distribution and converts the units of the
    bounds_pairing to the desired_units.

    This method gets the bounds values and units from the imported
    dictionaries: bounds_for_ecdf and units_of_bounds_for_ecdf.
    The units of the bounds are converted to be the desired units.

    Parameters
    ----------
    bounds_pairing_key : String
        Name of key to be used for the bounds_for_ecdf dictionary, in order
        to get the desired bounds_pairing.
    desired_units : cf_units.Unit
        Units to which the bounds_pairing will be converted.

    Returns
    -------
    bounds_pairing : Tuple
        Lower and upper bound to be used as the ends of the
        empirical cumulative distribution function, converted to have
        the desired units.

    """
    # Extract bounds from dictionary of constants.
    try:
        bounds_pairing = bounds_for_ecdf[bounds_pairing_key].value
        bounds_pairing_units = bounds_for_ecdf[bounds_pairing_key].units
    except KeyError as err:
        msg = ("The bounds_pairing_key: {} is not recognised "
               "within bounds_for_ecdf {}. \n"
               "Error: {}".format(
                   bounds_pairing_key, bounds_for_ecdf, err))
        raise KeyError(msg)
    bounds_pairing_units = unit.Unit(bounds_pairing_units)
    bounds_pairing = bounds_pairing_units.convert(
        np.array(bounds_pairing), desired_units)
    return bounds_pairing
Example #22
def _extract_variable(short_name, var, version, cfg, filepath, out_dir):
    """Extract variable."""
    raw_var = var.get('raw', short_name)
    with catch_warnings():
        filterwarnings(
            action='ignore',
            message='Ignoring netCDF variable .* invalid units .*',
            category=UserWarning,
            module='iris',
        )
        cube = iris.load_cube(filepath, utils.var_name_constraint(raw_var))

    # Fix units
    cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name)
    utils._set_units(cube, var.get('raw_units', short_name))
    # fix calendar type
    cal_time = var.get('calendar', short_name)
    origin_time = cube.coord('time').units.origin
    cube.coord('time').units = cf_units.Unit(origin_time, calendar=cal_time)
    cube.convert_units(cmor_info.units)
    utils.convert_timeunits(cube, 1950)

    # Fix coordinates
    utils.fix_coords(cube)

    # Fix metadata
    attrs = cfg['attributes']
    attrs['mip'] = var['mip']
    attrs['version'] = version
    utils.fix_var_metadata(cube, cmor_info)
    utils.set_global_atts(cube, attrs)

    # Save variable
    utils.save_variable(cube,
                        short_name,
                        out_dir,
                        attrs,
                        unlimited_dimensions=['time'])
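The calendar swap in the middle of this function (rebuilding a unit from its origin string with a new calendar) also works on a bare Unit; a sketch:

import cf_units

old = cf_units.Unit('days since 1950-01-01 00:00:00', calendar='360_day')
fixed = cf_units.Unit(old.origin, calendar='standard')
# Same origin string, now interpreted on the standard calendar.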
Example #23
def fix_time_coord_duveiller2018(cube):
    """Fix the time coordinate for dataset Duveiller2018."""
    # Rename 'Month' to 'time'
    cube.coord('Month').rename('time')

    # Create arrays for storing datetime objects
    custom_time = np.zeros((12), dtype=object)
    custom_time_bounds = np.empty((12, 2), dtype=object)
    custom_time_units = 'days since 1950-01-01 00:00:00.0'

    # Now fill the object arrays defined above with datetime objects
    # corresponding to correct time and time_bnds
    for i in range(custom_time_bounds.shape[0]):
        n_month = i + 1  # we start with month number 1, at position 0
        # Start with time_bnds
        time_bnd_a = datetime.datetime(2010, n_month, 1)
        if n_month == 12:
            time_bnd_b = datetime.datetime(2011, 1, 1)
        else:
            time_bnd_b = datetime.datetime(2010, n_month + 1, 1)
        # Get time 'point' from midpoint between bnd_a and bnd_b
        time_midpoint = time_bnd_a + 0.5 * (time_bnd_b - time_bnd_a)
        custom_time_bounds[n_month - 1, 0] = time_bnd_a
        custom_time_bounds[n_month - 1, 1] = time_bnd_b
        custom_time[n_month - 1] = time_midpoint

    # Convert them
    time_bnds = cf_units.date2num(custom_time_bounds, custom_time_units,
                                  cf_units.CALENDAR_GREGORIAN)
    time_midpoints = cf_units.date2num(custom_time, custom_time_units,
                                       cf_units.CALENDAR_GREGORIAN)

    # Add them to the cube
    cube.coord('time').bounds = time_bnds
    cube.coord('time').points = time_midpoints

    # Set the correct time unit, as defined above
    cube.coord('time').units = cf_units.Unit(custom_time_units)
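The module-level cf_units.date2num used above also accepts a single datetime; a minimal sketch:

import datetime
import cf_units

point = cf_units.date2num(datetime.datetime(2010, 1, 16, 12),
                          'days since 1950-01-01 00:00:00.0',
                          cf_units.CALENDAR_GREGORIAN)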
Example #24
    def calculate(cubes):
        """Compute latitude of maximum meridional wind speed."""
        # Load cube, extract correct region and perform zonal mean
        ua_cube = cubes.extract_cube(iris.Constraint(name='eastward_wind'))
        ua_cube = ua_cube.interpolate([('air_pressure', PLEV)],
                                      scheme=iris.analysis.Linear())
        ua_cube = ua_cube.extract(
            iris.Constraint(latitude=lambda cell: LAT[0] <= cell <= LAT[1]))
        ua_cube = ua_cube.collapsed('longitude', iris.analysis.MEAN)

        # Calculate maximum jet position
        uajet_vals = []
        for time_slice in ua_cube.slices(['latitude']):
            ua_data = time_slice.data

            # Get maximum ua and corresponding index
            idx_max_ua = np.argmax(ua_data)
            slc = slice(idx_max_ua - 1, idx_max_ua + 2)

            # Perform 2nd degree polynomial fit to get maximum jet position
            x_vals = ua_data[slc]
            y_vals = time_slice.coord('latitude').points[slc]
            polyfit = np.polyfit(x_vals, y_vals, 2)
            polynom = np.poly1d(polyfit)
            uajet_vals.append(polynom(np.max(ua_data)))

        uajet_cube = iris.cube.Cube(uajet_vals,
                                    units=cf_units.Unit('degrees_north'),
                                    dim_coords_and_dims=[
                                        (ua_cube.coord('time'), 0)
                                    ],
                                    attributes={
                                        'plev': PLEV,
                                        'lat_range_0': LAT[0],
                                        'lat_range_1': LAT[1]
                                    })

        return uajet_cube
Example #25
 def simple_1d_time_cubes(self, calendar="gregorian"):
     coord_points = [1, 2, 3, 4, 5]
     data_points = [273, 275, 278, 277, 274]
     reftimes = [
         "hours since 1970-01-01 00:00:00",
         "hours since 1970-01-02 00:00:00",
     ]
     list_of_cubes = []
     for reftime in reftimes:
         cube = iris.cube.Cube(
             np.array(data_points, dtype=np.float32),
             standard_name="air_temperature",
             units="K",
         )
         unit = cf_units.Unit(reftime, calendar=calendar)
         coord = iris.coords.DimCoord(
             points=np.array(coord_points, dtype=np.float32),
             standard_name="time",
             units=unit,
         )
         cube.add_dim_coord(coord, 0)
         list_of_cubes.append(cube)
     return list_of_cubes
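Because the two cubes carry different reference epochs, they will not merge as-is; one hedged sketch of reconciling them (assuming the fixture is callable as a plain function):

import iris.cube
import iris.util

cubes = iris.cube.CubeList(simple_1d_time_cubes())  # hypothetical direct call
iris.util.unify_time_units(cubes)  # rebase every time coord onto one epoch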
Example #26
def exp(cube, in_place=False):
    """
    Calculate the exponential (exp(x)) of the cube.

    Args:

    * cube:
        An instance of :class:`iris.cube.Cube`.

    .. note::

        Taking an exponential will return a cube with dimensionless units.

    Kwargs:

    * in_place:
        Whether to create a new Cube, or alter the given "cube".

    Returns:
        An instance of :class:`iris.cube.Cube`.

    """
    return _math_op_common(cube, np.exp, cf_units.Unit('1'), in_place=in_place)
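The dimensionless unit handed to _math_op_common behaves as expected on its own; a tiny sketch:

import cf_units

one = cf_units.Unit('1')
print(one.is_dimensionless())  # True
print(one.is_unknown())        # False: '1' is a known, unitless unit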
Example #27
def _load_4d_testcube():
    # Load example 4d data (TZYX).
    test_cube = iris.tests.stock.realistic_4d()
    # Replace forecast_period coord with a multi-valued version.
    time_coord = test_cube.coord('time')
    n_times = len(time_coord.points)
    forecast_dims = test_cube.coord_dims(time_coord)
    test_cube.remove_coord('forecast_period')
    # Make up values (including bounds), to roughly match older testdata.
    point_values = np.linspace((1 + 1.0 / 6), 2.0, n_times)
    point_uppers = point_values + (point_values[1] - point_values[0])
    bound_values = np.column_stack([point_values, point_uppers])
    # NOTE: this must be a DimCoord
    #  - an equivalent AuxCoord produces different plots.
    new_forecast_coord = iris.coords.DimCoord(points=point_values,
                                              bounds=bound_values,
                                              standard_name='forecast_period',
                                              units=cf_units.Unit('hours'))
    test_cube.add_aux_coord(new_forecast_coord, forecast_dims)
    # Heavily reduce dimensions for faster testing.
    # NOTE: this makes ZYX non-contiguous.  Doesn't seem to matter for now.
    test_cube = test_cube[:, ::10, ::10, ::10]
    return test_cube
Example #28
def run(combine_to: Path, files_to_combine: Iterable[Path]) -> None:
    total_shape = get_shape(files_to_combine)
    logger.info(f"Creating a dataset of shape {total_shape}")
    offset = "minutes since 1970-01-01 00:00:00"
    time_unit = cf_units.Unit(offset, calendar=cf_units.CALENDAR_STANDARD)
    x_size = total_shape[1]
    y_size = total_shape[2]
    with h5netcdf.File(combine_to, "w") as f:
        # Dimensions.
        f.dimensions["time"] = total_shape[0]
        f.dimensions["y"] = y_size
        f.dimensions["x"] = x_size

        # Coordinate variables.
        time_var = f.create_variable("time", dimensions=("time", ), dtype=int)
        time_var.attrs["units"] = time_unit.name

        f.create_variable("x", dimensions=("x", ), data=np.arange(x_size))
        f.create_variable("y", dimensions=("y", ), data=np.arange(y_size))

        # Data variables.
        rain_var = f.create_variable("rain",
                                     dimensions=("time", "y", "x"),
                                     dtype=int,
                                     compression="lzf")
        rain_var.attrs["units"] = "mm/h"
        rain_var.attrs["_FillValue"] = -1

        start = 0
        end = 0
        for year_netcdf in files_to_combine:
            logger.info(f"Processing {year_netcdf}")
            with h5netcdf.File(year_netcdf, "r") as year_netcdf_file:
                end += len(year_netcdf_file["time"])
                logger.info(f"Writing to [{start}:{end}]")
                write_to_netcdf(f, year_netcdf_file, start, end)
                start = end
Example #29
def parse_config(config_file):
    """Parse the yaml file with the configuration for each run."""
    from datetime import date, datetime, timedelta

    import yaml
    import pytz
    import cf_units

    with open(config_file, 'r') as f:
        config = yaml.safe_load(f)

    # Dates are normalized to UTC.
    start = config['date']['start']
    stop = config['date']['stop']
    if isinstance(start, int) and isinstance(stop, int):
        start = datetime.combine(
            date.today() - timedelta(days=abs(start)), datetime.min.time()
            )
        stop = datetime.combine(
            date.today() + timedelta(days=abs(stop)), datetime.min.time()
            )
    elif isinstance(start, int) and isinstance(stop, datetime):
        start = stop - timedelta(days=abs(start))
    elif isinstance(start, datetime) and isinstance(stop, int):
        stop = start + timedelta(days=abs(stop))
    elif isinstance(start, datetime) and isinstance(stop, datetime):
        pass
    else:
        msg = "Expect dates (YYYY-MM-DD hh:mm:ss) or days offest (int).\nGot start={} and stop={}."  # noqa
        raise ValueError(msg.format(start, stop))
    config['date']['start'] = start.replace(tzinfo=pytz.utc)
    config['date']['stop'] = stop.replace(tzinfo=pytz.utc)

    # Units.
    config['units'] = cf_units.Unit(config['units'])

    return config
Example #30
 def setUp(self):
     data = np.arange(24, dtype=np.float32).reshape(2, 3, 4)
     cube = iris.cube.Cube(data, standard_name="air_temperature", units="K")
     # Time coord
     t_unit = cf_units.Unit("hours since 1970-01-01 00:00:00",
                            calendar="gregorian")
     t_coord = iris.coords.DimCoord(
         points=np.arange(2, dtype=np.float32),
         standard_name="time",
         units=t_unit,
     )
     cube.add_dim_coord(t_coord, 0)
     # Lats and lons
     x_coord = iris.coords.DimCoord(
         points=np.arange(3, dtype=np.float32),
         standard_name="longitude",
         units="degrees",
     )
     cube.add_dim_coord(x_coord, 1)
     y_coord = iris.coords.DimCoord(
         points=np.arange(4, dtype=np.float32),
         standard_name="latitude",
         units="degrees",
     )
     cube.add_dim_coord(y_coord, 2)
     # Scalars
     cube.add_aux_coord(iris.coords.AuxCoord([0], "height", units="m"))
     # Aux Coords
     cube.add_aux_coord(
         iris.coords.AuxCoord(data, long_name="wibble", units="1"),
         data_dims=(0, 1, 2),
     )
     cube.add_aux_coord(
         iris.coords.AuxCoord([0, 1, 2], long_name="foo", units="1"),
         data_dims=(1, ),
     )
     self.cube = cube