Example #1
 def test_long_time_interval(self):
     # date2num should raise a ValueError for calendar-dependent intervals
     # such as 'years'; assert that it does.
     unit = 'years since 1970-01-01'
     date = datetime.datetime(1970, 1, 1, 0, 0, 5)
     exp_emsg = 'interval of "months", "years" .* got \'years\'.'
     with self.assertRaisesRegex(ValueError, exp_emsg):
         date2num(date, unit, self.calendar)
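A minimal standalone sketch of the behaviour the test above asserts (assuming only that cf_units is installed): second-based units convert cleanly, while calendar-dependent intervals such as 'years' make date2num raise a ValueError.

import datetime

import cf_units

date = datetime.datetime(1970, 1, 1, 0, 0, 5)

# Second-based units convert cleanly.
print(cf_units.date2num(date, 'seconds since 1970-01-01', 'gregorian'))  # 5.0

# Calendar-dependent intervals such as 'years' are rejected.
try:
    cf_units.date2num(date, 'years since 1970-01-01', 'gregorian')
except ValueError as err:
    print(err)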
Example #2
def set_up_cube(data,
                name,
                units,
                realizations=np.array([0, 1, 2], dtype=np.int32),
                timesteps=1,
                y_dimension_length=3,
                x_dimension_length=3):
    """Create a cube containing multiple realizations."""
    try:
        cube = Cube(data, standard_name=name, units=units)
    except ValueError:
        cube = Cube(data, long_name=name, units=units)

    cube.add_dim_coord(DimCoord(realizations, 'realization', units='1'), 0)
    time_origin = "seconds since 1970-01-01 00:00:00"
    calendar = "gregorian"
    tunit = Unit(time_origin, calendar)
    dt1 = datetime.datetime(2017, 1, 10, 3, 0)
    dt2 = datetime.datetime(2017, 1, 10, 4, 0)
    num1 = cf_units.date2num(dt1, time_origin, calendar)
    num2 = cf_units.date2num(dt2, time_origin, calendar)
    cube.add_dim_coord(
        DimCoord(np.linspace(num1, num2, timesteps, dtype=np.int64),
                 "time",
                 units=tunit), 1)
    cube.add_dim_coord(
        DimCoord(np.linspace(-45.0, 45.0, y_dimension_length,
                             dtype=np.float32),
                 'latitude',
                 units='degrees'), 2)
    cube.add_dim_coord(
        DimCoord(np.linspace(120, 180, x_dimension_length, dtype=np.float32),
                 'longitude',
                 units='degrees'), 3)
    return cube
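A hedged usage sketch for set_up_cube: the data array must already have the (realization, time, latitude, longitude) shape that matches the coordinates the helper attaches, here the defaults of 3 realizations, 1 timestep and a 3x3 grid. The 'air_temperature'/'K' name and units are purely illustrative.

import numpy as np

# Shape follows (realizations, timesteps, y, x) as expected by set_up_cube above.
data = 275.0 * np.ones((3, 1, 3, 3), dtype=np.float32)
cube = set_up_cube(data, 'air_temperature', 'K')
print(cube.summary(shorten=True))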
Example #3
    def test_basic_no_time_bounds(self):
        """Test that it creates appropriate bounds if there are no time bounds
        """
        for cube in self.cubelist:
            cube.coord("time").bounds = None

        time_point = np.around(
            date2num(dt(2015, 11, 19, 2), TIME_UNIT, CALENDAR)
        ).astype(np.int64)
        time_bounds = [
            np.around(date2num(dt(2015, 11, 19, 1), TIME_UNIT, CALENDAR)).astype(
                np.int64
            ),
            np.around(date2num(dt(2015, 11, 19, 3), TIME_UNIT, CALENDAR)).astype(
                np.int64
            ),
        ]
        expected_result = iris.coords.DimCoord(
            time_point, bounds=time_bounds, standard_name="time", units=TIME_UNIT
        )

        result = expand_bounds(
            self.cubelist[0], self.cubelist, ["time"], use_midpoint=True
        )
        self.assertEqual(result.coord("time"), expected_result)
Example #4
def _extract_file_metadata(file_path):
    """
    Extracts metadata from file name and returns dictionary.
    """
    # e.g. tasmax_day_IPSL-CM5A-LR_amip4K_r1i1p1_18590101-18591230.nc
    keys = ("var_id", "table", "climate_model", "experiment", "ensemble",
            "time_range")

    items = os.path.splitext(os.path.basename(file_path))[0].split("_")
    data = {}

    for i in range(len(items)):
        key = keys[i]
        value = items[i]

        if key == "time_range":
            start_time, end_time = value.split("-")
            data["start_time"] = cf_units.date2num(
                _date_from_string(start_time), TIME_UNITS, '360_day')
            data["end_time"] = cf_units.date2num(_date_from_string(end_time),
                                                 TIME_UNITS, '360_day')
            data["time_units"] = TIME_UNITS
        else:
            data[key] = value

    return data
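A hedged usage sketch for _extract_file_metadata; TIME_UNITS and _date_from_string are module-level helpers in the source, so the numeric start/end times depend on their definitions. The file name is the example given in the comment above.

fname = 'tasmax_day_IPSL-CM5A-LR_amip4K_r1i1p1_18590101-18591230.nc'
metadata = _extract_file_metadata(fname)
print(metadata['var_id'], metadata['climate_model'])  # tasmax IPSL-CM5A-LR
print(metadata['start_time'], metadata['end_time'])   # numeric times in TIME_UNITS (360_day calendar)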
Example #5
def construct_scalar_time_coords(time, time_bounds, frt):
    """
    Construct scalar time coordinates as aux_coord list

    Args:
        time (datetime.datetime):
            Single time point
        time_bounds (tuple or list of datetime.datetime instances or None):
            Lower and upper bound on time point, if required
        frt (datetime.datetime):
            Single forecast reference time point

    Returns:
        coord_dims (list):
            List of (iris.coords.DimCoord, None) tuples, i.e. each scalar
            coordinate paired with the "None" dimension (the format required
            by iris.cube.Cube initialisation).
    """
    # generate time coordinate points
    time_point_seconds = np.round(date2num(time, TIME_UNIT,
                                           CALENDAR)).astype(np.int64)
    frt_point_seconds = np.round(date2num(frt, TIME_UNIT,
                                          CALENDAR)).astype(np.int64)

    if time_point_seconds < frt_point_seconds:
        raise ValueError('Cannot set up cube with negative forecast period')
    fp_point_seconds = (time_point_seconds - frt_point_seconds).astype(
        np.int32)

    # parse bounds if required
    if time_bounds is not None:
        lower_bound = np.round(date2num(time_bounds[0], TIME_UNIT,
                                        CALENDAR)).astype(np.int64)
        upper_bound = np.round(date2num(time_bounds[1], TIME_UNIT,
                                        CALENDAR)).astype(np.int64)
        bounds = (min(lower_bound, upper_bound), max(lower_bound, upper_bound))
        if time_point_seconds < bounds[0] or time_point_seconds > bounds[1]:
            raise ValueError('Time point {} not within bounds {}-{}'.format(
                time, time_bounds[0], time_bounds[1]))
        fp_bounds = np.array(
            [[bounds[0] - frt_point_seconds,
              bounds[1] - frt_point_seconds]]).astype(np.int32)
    else:
        bounds = None
        fp_bounds = None

    # create coordinates
    time_coord = DimCoord(time_point_seconds,
                          "time",
                          units=TIME_UNIT,
                          bounds=bounds)
    frt_coord = DimCoord(frt_point_seconds,
                         "forecast_reference_time",
                         units=TIME_UNIT)
    fp_coord = DimCoord(fp_point_seconds,
                        "forecast_period",
                        units="seconds",
                        bounds=fp_bounds)

    coord_dims = [(time_coord, None), (frt_coord, None), (fp_coord, None)]
    return coord_dims
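A hedged usage sketch for construct_scalar_time_coords; TIME_UNIT and CALENDAR are module constants in the source (elsewhere in these examples they are 'seconds since 1970-01-01 00:00:00' and 'gregorian'), and the datetime values below are purely illustrative.

from datetime import datetime

coords = construct_scalar_time_coords(
    time=datetime(2017, 11, 10, 4, 0),
    time_bounds=(datetime(2017, 11, 10, 3, 0), datetime(2017, 11, 10, 4, 0)),
    frt=datetime(2017, 11, 10, 0, 0))

for coord, dim in coords:
    print(coord.name(), coord.points, dim)  # dim is None for scalar coordinates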
Example #6
    def setUp(self):
        """Set up a cubelist for testing"""

        data = 275.5 * np.ones((3, 3), dtype=np.float32)
        frt = dt(2015, 11, 19, 0)
        time_points = [dt(2015, 11, 19, 1), dt(2015, 11, 19, 3)]
        time_bounds = [
            [dt(2015, 11, 19, 0), dt(2015, 11, 19, 2)],
            [dt(2015, 11, 19, 1), dt(2015, 11, 19, 3)],
        ]

        self.cubelist = iris.cube.CubeList([])
        for tpoint, tbounds in zip(time_points, time_bounds):
            cube = set_up_variable_cube(data,
                                        frt=frt,
                                        time=tpoint,
                                        time_bounds=tbounds)
            self.cubelist.append(cube)

        self.expected_bounds_seconds = [
            date2num(dt(2015, 11, 19, 0), TIME_UNIT,
                     CALENDAR).astype(np.int64),
            date2num(dt(2015, 11, 19, 3), TIME_UNIT,
                     CALENDAR).astype(np.int64),
        ]

        self.expected_bounds_hours = [
            date2num(dt(2015, 11, 19, 0), "hours since 1970-01-01 00:00:00",
                     CALENDAR),
            date2num(dt(2015, 11, 19, 3), "hours since 1970-01-01 00:00:00",
                     CALENDAR),
        ]
Example #7
def date_filter_files(data_files, start_year, end_year):
    """
    Filter a set of data file model objects and return those that lie between,
    or overlap, the 1st of January in the start year and the last day of the
    end year. The data files are assumed to come from a common source and so
    all have the same time_units and calendar.

    :param django.db.models.query.QuerySet data_files: the data files to
        filter by date
    :param int start_year: the first year of the range to find.
    :param int end_year: the final year of the range to find.
    :returns: the filtered files
    :rtype: django.db.models.query.QuerySet
    """
    if data_files:
        time_units = data_files[0].time_units
        calendar = data_files[0].calendar
    else:
        return None

    if start_year is not None and time_units and calendar:
        start_float = cf_units.date2num(
            datetime.datetime(start_year, 1, 1), time_units,
            calendar
        )
    else:
        start_float = None
    if end_year is not None and time_units and calendar:
        end_float = cf_units.date2num(
            datetime.datetime(end_year + 1, 1, 1), time_units,
            calendar
        )
    else:
        end_float = None

    timeless_files = data_files.filter(start_time__isnull=True)

    if start_float is not None and end_float is not None:
        between_files = (data_files.exclude(start_time__isnull=True).
                         filter(start_time__gte=start_float,
                                end_time__lt=end_float))
        start_straddle = (data_files.exclude(start_time__isnull=True).
                          filter(start_time__lt=start_float,
                                 end_time__gt=start_float))
        end_straddle = (data_files.exclude(start_time__isnull=True).
                        filter(start_time__lt=end_float,
                               end_time__gt=end_float))
        data_files = between_files | start_straddle | end_straddle
    else:
        data_files = data_files.exclude(start_time__isnull=True)

    return (timeless_files | data_files).distinct()
Example #8
def extract_time_range(cubes, start, end):
    """Extract time ranged data."""
    time_ranged_cubes = []
    iris.util.unify_time_units(cubes)
    time_unit = cubes[0].coord('time').units.name
    dd_start = dd(start.year, start.month, start.day, 0, 0, 0)
    t_1 = cf_units.date2num(dd_start, time_unit, cf_units.CALENDAR_STANDARD)
    dd_end = dd(end.year, end.month, end.day, 0, 0, 0)
    t_2 = cf_units.date2num(dd_end, time_unit, cf_units.CALENDAR_STANDARD)
    for cube in cubes:
        time_constraint = iris.Constraint(time=lambda t: (
            t_1 <= datetime_to_int_days(t.point, time_unit) <= t_2))
        cube_slice = cube.extract(time_constraint)
        time_ranged_cubes.append(cube_slice)
    return time_ranged_cubes
Example #9
 def test_single(self):
     date = datetime.datetime(1970, 1, 1, 0, 0, 5)
     exp = 5.
     res = date2num(date, self.unit, self.calendar)
     # date2num won't necessarily return an exact value representing the
     # date, even if one exists
     self.assertAlmostEqual(exp, res, places=4)
Example #10
    def test_wxcode_time_different_seconds(self):
        """ Test code works if time coordinate has a difference in the number
        of seconds, which should round to the same time in hours and minutes.
        This was raised by changes to cftime which altered its precision."""
        time_origin = "hours since 1970-01-01 00:00:00"
        calendar = "gregorian"
        dateval = datetime.datetime(2018, 9, 12, 5, 42, 59)
        numdateval = date2num(dateval, time_origin, calendar)
        time_points = [numdateval]

        cube = set_up_wxcube(time_points=time_points)
        cube.data = self.cube_data
        cube = iris.util.squeeze(cube)
        expected_result = np.array(
            [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1],
             [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3],
             [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5],
             [6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6],
             [7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
             [8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8],
             [9, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10],
             [13, 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14],
             [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15],
             [16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17],
             [18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18],
             [19, 19, 19, 19, 19, 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20],
             [22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23],
             [25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26],
             [27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27],
             [28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29]])
        result = update_daynight(cube)

        self.assertArrayEqual(result.data, expected_result)
        self.assertEqual(result.data.shape, (16, 16))
Example #11
def cycletime_to_number(
    cycletime: str,
    cycletime_format: str = "%Y%m%dT%H%MZ",
    time_unit: str = "hours since 1970-01-01 00:00:00",
    calendar: str = "gregorian",
) -> float:
    """Convert a cycletime of the format YYYYMMDDTHHMMZ into a numeric
    time value.

    Args:
        cycletime:
            A cycletime that can be converted into a datetime using the
            cycletime_format supplied.
        cycletime_format:
            String containing the datetime directives describing the format
            of the supplied cycletime string.
        time_unit:
            String representation of the cycletime units.
        calendar:
            String describing the calendar used for defining the cycletime.
            The choice of calendar must be supported by cf_units.CALENDARS.

    Returns:
        A numeric value to represent the datetime using assumed choices
        for the unit of time and the calendar.
    """
    dtval = cycletime_to_datetime(cycletime, cycletime_format=cycletime_format)
    return cf_units.date2num(dtval, time_unit, calendar)
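A hedged usage sketch; cycletime_to_datetime (called internally) is assumed to parse the default '%Y%m%dT%H%MZ' format, and the cycletime string below is purely illustrative.

num = cycletime_to_number('20171122T0100Z')
print(num)  # hours since 1970-01-01 00:00:00 for 2017-11-22 01:00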
Example #12
def standardise_time_unit(time_float, time_unit, standard_unit, calendar):
    """
    Standardise a floating point time in one time unit by returning the
    corresponding time in the `standard_unit`. The original value is returned if
    it is already in the `standard_unit`. None is returned if the `time_float`
    is None.

    :param float time_float: The time to change
    :param str time_unit: The original time's units
    :param str standard_unit: The new unit
    :param str calendar: The cftime calendar
    :returns: A floating point representation of the old time in
        `standard_unit`
    """
    if (time_float is None or time_unit is None or
            standard_unit is None or calendar is None):
        return None

    if time_unit == standard_unit:
        return time_float

    date_time = cf_units.num2date(time_float, time_unit, calendar)
    corrected_time = cf_units.date2num(date_time, standard_unit, calendar)

    return corrected_time
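A hedged worked example: 24 hours after the epoch is the same instant as 1 day after the epoch, so the conversion below should return 1.0, while a None input short-circuits to None.

print(standardise_time_unit(24.0,
                            'hours since 1970-01-01 00:00:00',
                            'days since 1970-01-01 00:00:00',
                            'gregorian'))  # 1.0
print(standardise_time_unit(None,
                            'hours since 1970-01-01 00:00:00',
                            'days since 1970-01-01 00:00:00',
                            'gregorian'))  # None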
Example #13
def calc_last_day_in_month(year, month, calendar):
    """
    Calculate the last day of the specified month using the calendar given.

    :param int year: The year
    :param int month: The month
    :param str calendar: The calendar to use, which must be supported by
        cf_units
    :returns: The last day of the specified month
    :rtype: int
    """
    ref_units = 'days since 1969-07-21'

    if month == 12:
        start_next_month_obj = netcdftime.datetime(year + 1, 1, 1)
    else:
        start_next_month_obj = netcdftime.datetime(year, month + 1, 1)

    start_next_month = cf_units.date2num(start_next_month_obj, ref_units,
                                         calendar)

    end_this_month = cf_units.num2date(start_next_month - 1, ref_units,
                                       calendar)

    return end_this_month.day
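A hedged usage sketch showing how the calendar changes the answer: February 2016 has 29 days in a standard calendar, but every month has 30 days in a 360_day calendar.

print(calc_last_day_in_month(2016, 2, 'gregorian'))   # 29
print(calc_last_day_in_month(2016, 2, '360_day'))     # 30
print(calc_last_day_in_month(2015, 12, 'gregorian'))  # 31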
Example #14
 def create_data_object(self, filenames, variable):
     data_dict = {}  #initialise data dictionary
     inData = netCDF4.Dataset(filenames[0])  #open netCDF file
     data_dict['longitude'] = np.array(
         inData.variables['lon'])  #extract longitudes
     data_dict['latitude'] = np.array(
         inData.variables['lat'])  #extract latitudes
     origTimes = np.array(inData.variables['time_counter'])  #extract times
     # Convert the times to 'days since 1600-01-01 00:00:00'
     niceDateTime = cf_units.num2date(origTimes,
                                      'seconds since 1999-01-01 00:00:00',
                                      'gregorian')
     data_dict['time'] = cf_units.date2num(
         niceDateTime, 'days since 1600-01-01 00:00:00', 'gregorian')
     data_dict[variable] = np.array(
         inData.variables[variable])  #extract requested variable
     inData.close()  #close netCDF file
     coords = self._create_coord_list(filenames, data_dict)
     return UngriddedData(
         data_dict[variable],
         Metadata(name=variable,
                  long_name=variable,
                  shape=(len(data_dict), ),
                  missing_value=-999.0,
                  units="1"), coords)
Example #15
    def mask_bad_data(self, params, output):
        """
        If bad points are known to exist in a dataset then these are masked.

        :param iris.cube.Cube params: Cube to reformat
        :param iris.cube.CubeList output: Cube list to contain reformatted cubes
        :param np.array t_constr: A two element array for specifying start and
        end year of data
        :return iris.cube.Cube: Reformatted cube
        """
        cube = params.get()

        simulation_label = cube.coord('simulation_label').points[0]

        if 'CMCC-CM2-VHR4' in simulation_label:
            dt = datetime(2003, 2, 2, 12, 0, 0)
            tc = cube.coord('time')
            numeric_date = cf_units.date2num(dt, tc.units.name,
                                             tc.units.calendar)
            time_point_array = np.where(tc.points == numeric_date)[0]
            if len(time_point_array) == 0:
                logger.warning('Cannot mask. {} not found in {}'.format(
                    dt, simulation_label))
            else:
                time_point_index = time_point_array[0]
                cube.data[time_point_index, ...] = np.ma.masked
                logger.debug('Masking bad data for {} at {}'.format(
                    simulation_label, dt))
        else:
            logger.debug(
                'No data requires masking for {}'.format(simulation_label))
        output.append(cube)
Example #16
def datetime_to_numdateval(year=2018, month=9, day=12, hour=5, minutes=43):
    """
    Convert date and time to a numdateval for use in a cube

    Args:
        year (int):
           require year, default is 2018
        month (int):
           require year, default is 9
        day (int):
           require year, default is 12
        hour (int):
           require year, default is 5
        minutes (int):
           require year, default is 43

    Default values should be roughly sunrise in Exeter.

    Returns:
        float:
           date and time as a value relative to time_origin
    """

    time_origin = "hours since 1970-01-01 00:00:00"
    calendar = "gregorian"
    dateval = datetime.datetime(year, month, day, hour, minutes)
    numdateval = date2num(dateval, time_origin, calendar)
    return numdateval
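A hedged usage sketch; the return value is a floating-point number of hours relative to 1970-01-01 00:00:00, and the second call just changes the default hour and minutes.

default_val = datetime_to_numdateval()  # 2018-09-12 05:43 as hours since the epoch
midnight_val = datetime_to_numdateval(hour=0, minutes=0)
print(default_val - midnight_val)  # approximately 5.717, i.e. 5 hours 43 minutes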
Example #17
    def test_multiple_coordinate_expanded(self):
        """Test that expand_bound produces sensible bounds when more than one
        coordinate is operated on, in this case expanding both the time and
        forecast period coordinates."""
        time_point = np.around(
            date2num(dt(2015, 11, 19, 3), TIME_UNIT,
                     CALENDAR)).astype(np.int64)
        expected_result_time = iris.coords.DimCoord(
            [time_point],
            bounds=self.expected_bounds_seconds,
            standard_name='time',
            units=TIME_UNIT)
        expected_result_fp = iris.coords.DimCoord(
            [10800],
            bounds=[0, 10800],
            standard_name='forecast_period',
            units='seconds')

        result = expand_bounds(self.cubelist[0], self.cubelist, {
            'time': 'upper',
            'forecast_period': 'upper'
        })
        self.assertEqual(result.coord('time'), expected_result_time)
        self.assertEqual(result.coord('forecast_period'), expected_result_fp)

        self.assertEqual(result.coord('time').dtype, np.int64)
Example #18
    def phenomenon_points(self, time_unit):
        """
        Return the phenomenon time point offset from the epoch time reference
        measured in the appropriate time units.

        """
        time_reference = "%s since epoch" % time_unit
        return cf_units.date2num(self._phenomenonDateTime, time_reference, cf_units.CALENDAR_GREGORIAN)
Example #19
def date2num(date, unit, calendar):
    if ('common_year' in unit):
        my_unit = unit.replace('common_year', 'day')
        my_conversion = 365.
    else:
        my_unit = unit
        my_conversion = 1.
    num = cf_units.date2num(date, my_unit, calendar)
    return num/my_conversion
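A hedged worked example of the wrapper above: for 'common_year' units the conversion is performed in days and divided by 365, so exactly one non-leap year from the reference date comes out as 1.0, while other units pass straight through to cf_units.date2num.

import datetime

print(date2num(datetime.datetime(1971, 1, 1), 'common_year since 1970-01-01', 'gregorian'))  # 1.0
print(date2num(datetime.datetime(1970, 1, 2), 'days since 1970-01-01', 'gregorian'))         # 1.0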
Example #20
    def phenomenon_points(self, time_unit):
        """
        Return the phenomenon time point offset from the epoch time reference
        measured in the appropriate time units.

        """
        time_reference = '%s since epoch' % time_unit
        return cf_units.date2num(self._phenomenonDateTime, time_reference,
                                 cf_units.CALENDAR_GREGORIAN)
Example #21
 def test_sequence(self):
     dates = [datetime.datetime(1970, 1, 1, 0, 0, 20),
              datetime.datetime(1970, 1, 1, 0, 0, 40),
              datetime.datetime(1970, 1, 1, 0, 1),
              datetime.datetime(1970, 1, 1, 0, 1, 20),
              datetime.datetime(1970, 1, 1, 0, 1, 40)]
     exp = [20., 40., 60., 80, 100.]
     res = date2num(dates, self.unit, self.calendar)
     np.testing.assert_array_almost_equal(exp, res, decimal=4)
Example #22
def _create_time_point(time):
    """Returns a coordinate point with appropriate units and datatype
    from a datetime.datetime instance.

    Args:
        time (datetime.datetime)
    """
    point = date2num(time, ctt.TIME_REFERENCE_UNIT, "gregorian")
    return np.round(point).astype(ctt.TIME_REFERENCE_DTYPE)
Example #23
def months_coord_to_days_coord(coord):
    """Convert a dimension coordinate from 'months since' to 'days since'

    This function uses the `calendar.monthrange` function to calculate
    the days per month, and sets the lower and upper bound for each month.
    Once the bounds have been set, `cf_units.date2num` is used to convert
    the bounds to numeric values, in days since the original offset.
    These bounds are averaged, to produce midpoints, which are the actual
    points for the new dimension coordinate. The new dimension coordinate
    also includes bounds, which the original may not have.

    """

    units = coord.units
    origin = units.origin
    # Assume an origin format of "<unit> since <start-date>", so we can split on 'since'
    step, startdate = map(str.strip, origin.split('since'))
    if step != 'months':
        raise ValueError('units step is not months')

    # Parse the starting date; assume it has a YYYY-MM-DD HH:MM:SS format,
    # or YYYY-MM-DD without the timestamp
    # Note: leading zeros for months, days, hours, minutes or seconds
    # may be safely ignored: 2010-1-1 or 2010-01-01 will both parse fine
    try:
        t0 = datetime.strptime(startdate, "%Y-%m-%d %H:%M:%S")  # pylint: disable=invalid-name
    except ValueError:
        t0 = datetime.strptime(startdate, "%Y-%m-%d")  # pylint: disable=invalid-name

    points = coord.points.astype(int)  # np.int is removed in recent NumPy versions
    bounds = []
    # Remember that 'point's are in whole months
    for point in points:
        year = t0.year + point // 12
        month = t0.month + point % 12
        current = datetime(year, month, 1)
        # Get number of days for this year (ignore starting weekday number)
        _, ndays = monthrange(year, month)
        # And set the boundary dates for this month
        bounds.append([current, datetime(year, month, ndays)])

    # date2num accepts a two-dimensional numpy array
    bounds = np.array(bounds)
    boundpoints = date2num(bounds,
                           unit=f'days since {startdate}',
                           calendar='gregorian')

    midpoints = boundpoints.mean(axis=1)
    day_coord = DimCoord(midpoints,
                         bounds=boundpoints,
                         standard_name=coord.standard_name,
                         long_name=coord.long_name,
                         units=f'days since {startdate}',
                         var_name=coord.var_name)

    return day_coord
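A hedged usage sketch for months_coord_to_days_coord; it assumes iris and cf_units are importable and that cf_units.Unit accepts the 'months since' origin string. The coordinate values below are purely illustrative.

import numpy as np
from cf_units import Unit
from iris.coords import DimCoord

month_coord = DimCoord(np.array([0, 1, 2]),
                       standard_name='time',
                       units=Unit('months since 2010-01-01', calendar='gregorian'))
day_coord = months_coord_to_days_coord(month_coord)
print(day_coord.points)  # mid-month offsets in days since 2010-01-01
print(day_coord.bounds)  # first and last day of each month, in days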
Example #24
def callback(cube, field, filename):
    from re import sub

    if '.nc' in filename:
        # Recover a YYYYMMDDHHMM timestamp by stripping every non-digit
        # character from the file name, then express it as days since 1970-01-01.
        timeobj = datetime.datetime.strptime(
            sub(r"\D", "", os.path.basename(filename)), "%Y%m%d%H%M")
        base_date = datetime.datetime(1970, 1, 1)
        time_units = 'days since ' + base_date.strftime('%Y-%m-%d')
        time_days = date2num(timeobj, time_units, CALENDAR_STANDARD)
        cube.coord('time').points = time_days
        cube.coord('time').units = time_units
    return cube
Example #25
def time_coords_for_test_cubes():
    """Set up time coordinates in human-readable format"""
    tunit = "seconds since 1970-01-01 00:00:00"
    calendar = "gregorian"

    time_point_seconds = np.round(
        date2num(datetime(2015, 11, 19, 2), tunit, calendar)).astype(np.int64)

    frt_points = []
    for i in range(3):
        frt_points.append(
            np.round(date2num(datetime(2015, 11, 19, i), tunit,
                              calendar)).astype(np.int64))

    time_coord = AuxCoord([time_point_seconds], "time", units=tunit)
    frt_coord = DimCoord(frt_points, "forecast_reference_time", units=tunit)
    fp_coord = AuxCoord([7200, 3600, 0], "forecast_period", units="seconds")

    return time_coord, frt_coord, fp_coord
Example #26
 def test_multidim_sequence(self):
     dates = [[datetime.datetime(1970, 1, 1, 0, 0, 20),
               datetime.datetime(1970, 1, 1, 0, 0, 40),
               datetime.datetime(1970, 1, 1, 0, 1)],
              [datetime.datetime(1970, 1, 1, 0, 1, 20),
               datetime.datetime(1970, 1, 1, 0, 1, 40),
               datetime.datetime(1970, 1, 1, 0, 2)]]
     exp_shape = (2, 3)
     res = date2num(dates, self.unit, self.calendar)
     self.assertEqual(exp_shape, res.shape)
Example #27
def _create_time_point(time):
    """Returns a coordinate point with appropriate units and datatype
    from a datetime.datetime instance.

    Args:
        time (datetime.datetime)
    """
    coord_spec = TIME_COORDS["time"]
    point = date2num(time, coord_spec.units, coord_spec.calendar)
    return np.around(point).astype(coord_spec.dtype)
Example #28
def _extract_file_metadata(file_path):
    """
    Extracts metadata from file name and returns dictionary.
    """
    # e.g. tasmax_day_IPSL-CM5A-LR_amip4K_r1i1p1_18590101-18591230.nc
    keys = ("var_id", "table", "climate_model", "experiment", "ensemble",
            "time_range")

    items = os.path.splitext(os.path.basename(file_path))[0].split("_")
    data = {}

    for i in range(len(items)):
        key = keys[i]
        value = items[i]

        if key == "time_range":
            start_time, end_time = value.split("-")

            data["start_time"] = cf_units.date2num(
                _date_from_string(start_time), TIME_UNITS, CALENDAR)

            data["end_time"] = cf_units.date2num(_date_from_string(end_time),
                                                 TIME_UNITS, CALENDAR)

            data["time_units"] = TIME_UNITS
            data["calendar"] = CALENDAR

        elif key == "table":
            data['table'] = value
            for fv in FREQUENCY_VALUES:
                if fv.lower() in value.lower():
                    data['frequency'] = fv
                    break
            if 'frequency' not in data:
                data['frequency'] = ''
        else:
            data[key] = value

    return data
Example #29
 def create_data_object(self, filenames, variable):
     data_dict = {} #initialise data dictionary
     inData = netCDF4.Dataset(filenames[0]) #open netCDF file
     data_dict['longitude'] = np.array(inData.variables['lon']) #extract longitudes
     data_dict['latitude'] = np.array(inData.variables['lat']) #extract latitudes
     origTimes = np.array(inData.variables['time_counter']) #extract times
     #Convert time to days since 
     niceDateTime = cf_units.num2date(origTimes,'seconds since 1999-01-01 00:00:00', 'gregorian')
     data_dict['time'] = cf_units.date2num(niceDateTime,'days since 1600-01-01 00:00:00', 'gregorian')
     data_dict[variable] = np.array(inData.variables[variable])  #extract requested variable
     inData.close() #close netCDF file
     coords = self._create_coord_list(filenames,data_dict)
     return UngriddedData(data_dict[variable],Metadata(name=variable,long_name=variable,shape=(len(data_dict),),missing_value=-999.0,units="1"),coords)
Example #30
 def test_basic_time(self):
     """Test that expand_bound produces sensible bounds."""
     time_point = np.around(
         date2num(dt(2015, 11, 19, 3), TIME_UNIT,
                  CALENDAR)).astype(np.int64)
     expected_result = iris.coords.DimCoord(
         [time_point],
         bounds=self.expected_bounds_seconds,
         standard_name="time",
         units=TIME_UNIT,
     )
     result = expand_bounds(self.cubelist[0], self.cubelist, ["time"])
     self.assertEqual(result.coord("time"), expected_result)
Example #31
def fix_time_coord_duveiller2018(cube):
    """Fix the time coordinate for dataset Duveiller2018."""
    # Rename 'Month' to 'time'
    cube.coord('Month').rename('time')

    # Create arrays for storing datetime objects
    custom_time = np.zeros((12), dtype=object)
    custom_time_bounds = np.empty((12, 2), dtype=object)
    custom_time_units = 'days since 1950-01-01 00:00:00.0'

    # Now fill the object arrays defined above with datetime objects
    # corresponding to correct time and time_bnds
    for i in range(custom_time_bounds.shape[0]):
        n_month = i + 1  # we start with month number 1, at position 0
        # Start with time_bnds
        time_bnd_a = datetime.datetime(2010, n_month, 1)
        if n_month == 12:
            time_bnd_b = datetime.datetime(2011, 1, 1)
        else:
            time_bnd_b = datetime.datetime(2010, n_month + 1, 1)
        # Get time 'point' from midpoint between bnd_a and bnd_b
        time_midpoint = time_bnd_a + 0.5 * (time_bnd_b - time_bnd_a)
        custom_time_bounds[n_month - 1, 0] = time_bnd_a
        custom_time_bounds[n_month - 1, 1] = time_bnd_b
        custom_time[n_month - 1] = time_midpoint

    # Convert them
    time_bnds = cf_units.date2num(custom_time_bounds, custom_time_units,
                                  cf_units.CALENDAR_GREGORIAN)
    time_midpoints = cf_units.date2num(custom_time, custom_time_units,
                                       cf_units.CALENDAR_GREGORIAN)

    # Add them to the cube
    cube.coord('time').bounds = time_bnds
    cube.coord('time').points = time_midpoints

    # Set the correct time unit, as defined above
    cube.coord('time').units = cf_units.Unit(custom_time_units)
Example #32
 def test_basic_time_upper(self):
     """Test that expand_bound produces sensible bounds
     when given arg 'upper'"""
     time_point = np.around(
         date2num(dt(2015, 11, 19, 3), TIME_UNIT,
                  CALENDAR)).astype(np.int64)
     expected_result = iris.coords.DimCoord(
         [time_point],
         bounds=self.expected_bounds_seconds,
         standard_name='time',
         units=TIME_UNIT)
     result = expand_bounds(self.cubelist[0], self.cubelist,
                            {'time': 'upper'})
     self.assertEqual(result.coord('time'), expected_result)
Example #33
 def _create_coord_list(self, filenames, data=None):
     if data is None:
         data = {} #initialise data dictionary
         inData = netCDF4.Dataset(filenames[0]) #open netCDF file
         data['longitude'] = np.array(inData.variables['lon']) #extract longitudes
         data['latitude'] = np.array(inData.variables['lat']) #extract latitudes
         origTimes = np.array(inData.variables['time_counter']) #extract times
         # Convert the times to 'days since 1600-01-01 00:00:00'
         niceDateTime = cf_units.num2date(origTimes, 'seconds since 1999-01-01 00:00:00', 'gregorian')
         # Store under the 'time' key so that the Coord built below can find it
         data['time'] = cf_units.date2num(niceDateTime, 'days since 1600-01-01 00:00:00', 'gregorian')
         inData.close() #close netCDF file
     coords = CoordList() #initialise coordinate list
     #Append latitudes and longitudes to coordinate list:
     coords.append(Coord(data['longitude'],Metadata(name="longitude",long_name='longitude',standard_name='longitude',shape=(len(data),),missing_value=-999.0,units="degrees_east",range=(-180, 180)),"x"))
     coords.append(Coord(data['latitude'],Metadata(name="latitude",long_name='latitude',standard_name='latitude',shape=(len(data),),missing_value=-999.0,units="degrees_north",range=(-90, 90)),"y"))
     coords.append(Coord(data['time'],Metadata(name="time",long_name='time',standard_name='time',shape=(len(data),),missing_value=-999.0,units="days since 1600-01-01 00:00:00"),"t"))
     return coords