Example no. 1
import iris.coord_categorisation as coord_cat

def add_times(cube, time):
    coord_cat.add_month(cube, time, name='month')
    coord_cat.add_season(cube, time, name='clim_season')
    coord_cat.add_year(cube, time, name='year')
    coord_cat.add_day_of_year(cube, time, name='day_number')
    coord_cat.add_season_year(cube, time, name='season_year')
    return cube
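
A minimal usage sketch, assuming the cube carries a 'time' coordinate (the file name is hypothetical); the 'clim_season'/'season_year' pair added above is exactly what aggregated_by needs for seasonal means:

import iris

cube = iris.load_cube('air_temp_monthly.nc')   # hypothetical input file
cube = add_times(cube, 'time')
seasonal_mean = cube.aggregated_by(['clim_season', 'season_year'],
                                   iris.analysis.MEAN)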
Example no. 2
import iris.coord_categorisation as icc

def add_extra_time_coords(cube):
    """
    Adds new coordinate for indexing a given simulation based on model and
    ensemble and adds additional time coordinates for unit manipulation
    """
    if not cube.coords('year'):
        icc.add_year(cube, 'time')
    if not cube.coords('month'):
        icc.add_month(cube, 'time')
    if not cube.coords('month_number'):
        icc.add_month_number(cube, 'time')
    if not cube.coords('day_of_month'):
        icc.add_day_of_month(cube, 'time')
    if not cube.coords('hour'):
        icc.add_hour(cube, 'time')
    return cube
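
The cube.coords(...) guards make this idempotent: a second call skips coordinates that already exist instead of raising "coordinate already exists" errors. A minimal sketch with a hypothetical input file:

import iris

cube = iris.load_cube('example.nc')   # hypothetical file with a 'time' coord
cube = add_extra_time_coords(cube)
cube = add_extra_time_coords(cube)    # no-op: every coord already present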
Example no. 3
    def test_basic(self):
        cube = self.cube
        time_coord = self.time_coord

        ccat.add_year(cube, time_coord, 'my_year')
        ccat.add_day_of_month(cube, time_coord, 'my_day_of_month')
        ccat.add_day_of_year(cube, time_coord, 'my_day_of_year')

        ccat.add_month(cube, time_coord, 'my_month')
        with warnings.catch_warnings(record=True):
            ccat.add_month_shortname(cube, time_coord, 'my_month_shortname')
        ccat.add_month_fullname(cube, time_coord, 'my_month_fullname')
        ccat.add_month_number(cube, time_coord, 'my_month_number')

        ccat.add_weekday(cube, time_coord, 'my_weekday')
        ccat.add_weekday_number(cube, time_coord, 'my_weekday_number')
        with warnings.catch_warnings(record=True):
            ccat.add_weekday_shortname(cube, time_coord,
                                       'my_weekday_shortname')
        ccat.add_weekday_fullname(cube, time_coord, 'my_weekday_fullname')

        ccat.add_season(cube, time_coord, 'my_season')
        ccat.add_season_number(cube, time_coord, 'my_season_number')
        with warnings.catch_warnings(record=True):
            ccat.add_season_month_initials(cube, time_coord,
                                           'my_season_month_initials')
        ccat.add_season_year(cube, time_coord, 'my_season_year')

        # also test 'generic' categorisation interface
        def _month_in_quarter(coord, pt_value):
            date = coord.units.num2date(pt_value)
            return (date.month - 1) % 3

        ccat.add_categorised_coord(cube,
                                   'my_month_in_quarter',
                                   time_coord,
                                   _month_in_quarter)

        # To ensure consistent results between 32-bit and 64-bit
        # platforms, ensure all the numeric categorisation coordinates
        # are always stored as int64.
        for coord in cube.coords():
            if coord.long_name is not None and coord.points.dtype.kind == 'i':
                coord.points = coord.points.astype(np.int64)

        # check values
        self.assertCML(cube, ('categorisation', 'quickcheck.cml'))
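
add_categorised_coord accepts any callable mapping (coord, point) to a category value, so categorisations beyond the built-ins are one-liners. A sketch (assuming `cube` has a 'time' coordinate) that groups years into decades:

import iris
import iris.coord_categorisation as ccat

def _decade(coord, point):
    # First year of the decade that the time point falls in.
    return (coord.units.num2date(point).year // 10) * 10

ccat.add_categorised_coord(cube, 'decade', 'time', _decade)
decadal_mean = cube.aggregated_by('decade', iris.analysis.MEAN)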
Example no. 4
def extract_cube_monthmean(infile, stash_codes):
    """ Extract specified fields, with multi-annual meaning if req """

    import iris.coord_categorisation as ircat
    # For easier categorisation of data into months

    # Extract fields and check --if less than 12, flag Error
    #   If more than (and multiple of) 12, derive multi-annual
    #   monthly means and return

    fieldcons = []  # List of stash constraints
    for spc in stash_codes:
        fieldcons.append(iris.AttributeConstraint(STASH=spc))

    incubes = iris.load(infile, constraints=fieldcons, callback=ukca_callback)

    # ---- For now, expect the coordinate to be called 'time' ----
    # Future: look up the cube's leading time dimension rather than
    # relying on a hard-coded coordinate name.
    tdim = incubes[0].coord('time').shape[0]
    log_msg('*** TDIM : {:d} months of data detected. ****'.format(tdim))
    if tdim < 12:
        raise Evalerror(
            'EXTR_CUBE: At least 12 (month) records expected. Found {:d}'.
            format(tdim))
    # If records/ files > 12 and multiple of 12, attempt multi-annual meaning
    if tdim > 12:
        if tdim % 12 != 0:
            raise Evalerror('EXTR_CUBE: Expected multiple of 12 (months).'
                            ' Found {:d}'.format(tdim))

        log_msg('EXTR_CUBE: ' + str(tdim // 12) + ' years of data detected.')
        log_msg(' Performing multi-annual mean\n')

        for n, cb in enumerate(incubes):
            # Add an auxiliary coord 'month' and average over it.
            # First check whether this coordinate already exists.
            try:
                cb.coord('month')
            except iris.exceptions.CoordinateNotFoundError:
                # Not found: add it (again assuming the coord is called 'time').
                ircat.add_month(cb, 'time', name='month')

            incubes[n] = cb.aggregated_by('month', iris.analysis.MEAN)

    return incubes
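
For context, stash_codes holds UM STASH identifiers, and iris.AttributeConstraint(STASH=...) matches them against the STASH attribute that iris attaches to cubes loaded from PP/fields files. A sketch with hypothetical codes and file name:

import iris

stash_codes = ['m01s34i001', 'm01s34i009']           # hypothetical codes
cons = [iris.AttributeConstraint(STASH=spc) for spc in stash_codes]
cubes = iris.load('um_output.pp', constraints=cons)  # hypothetical file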
Example no. 5
def climatology(cube, kind='month'):
    """Calculate a climatology for a cube.  Can do monthly or yearly.

    Args:
        cube (iris.cube.Cube)
        kind (Optional[str]): 'month' or 'year'

    Returns:
        iris.cube.Cube

    """
    aux_coords = [aux_coord.name() for aux_coord in cube.aux_coords]
    if 'year' not in aux_coords:
        cat.add_year(cube, 'time')
    if 'month' not in aux_coords:
        cat.add_month(cube, 'time')
        cat.add_month_number(cube, 'time')
    out = cube.aggregated_by(kind, iris.analysis.MEAN)

    # If the data don't start in January the time coordinate will no longer
    # be monotonic. Fix this.
    if (kind == 'month') and (not out.coord('time').is_monotonic()):

        # Reorder the data so January is first.
        jan_index = np.where(out.coord('month').points == 'Jan')[0][0]
        ntim = 12
        sort_indices = list(range(jan_index, ntim)) + list(range(0, jan_index))
        out = out[sort_indices]

        # Create a new time coordinate which is monotonic.
        startyear = int(out.coord('time').units.num2date(0).year)
        newtime_points = [
            netcdftime.datetime(startyear + (m // 12), (m % 12) + 1, 1)
            for m in out.coord('month_number').points.astype(int) - 1
        ]
        time_units = out.coord('time').units
        newtime_points = time_units.date2num(newtime_points)
        newtime = iris.coords.DimCoord(newtime_points,
                                       units=time_units,
                                       standard_name='time')
        data_dim = out.coord_dims('time')[0]
        out.remove_coord('time')
        out.add_dim_coord(newtime, data_dim)

    return out
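
A usage sketch (hypothetical file name); given a whole number of years of monthly data, the result has twelve time steps with January first:

import iris

cube = iris.load_cube('tas_monthly.nc')   # hypothetical monthly series
clim = climatology(cube, kind='month')
print(clim.coord('month').points)         # ['Jan' 'Feb' ... 'Dec']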
Example no. 7
    def test_basic(self):
        # Make a series of 'day numbers' for the time that slide across
        # month boundaries.
        day_numbers = np.arange(0, 600, 27, dtype=np.int32)

        cube = iris.cube.Cube(day_numbers, long_name='test cube', units='metres')

        # Use day numbers as data values also (not actually used for anything).
        cube.data = day_numbers

        # NB 'iris.unit' is the old pre-cf_units API, kept here as in the
        # original test; modern code would use cf_units.Unit instead.
        time_coord = iris.coords.DimCoord(
            day_numbers, standard_name='time',
            units=iris.unit.Unit('days since epoch', 'gregorian'))
        cube.add_dim_coord(time_coord, 0)

        # Add test coordinates for the examples wanted.
        ccat.add_year(cube, time_coord)
        ccat.add_day_of_month(cube, 'time')    # NB test passing coord-name instead of coord itself

        ccat.add_month(cube, time_coord)
        ccat.add_month_shortname(cube, time_coord, name='month_short')
        ccat.add_month_fullname(cube, time_coord, name='month_full')
        ccat.add_month_number(cube, time_coord, name='month_number')
        
        ccat.add_weekday(cube, time_coord)
        ccat.add_weekday_number(cube, time_coord, name='weekday_number')
        ccat.add_weekday_shortname(cube, time_coord, name='weekday_short')
        ccat.add_weekday_fullname(cube, time_coord, name='weekday_full')

        ccat.add_season(cube, time_coord)
        ccat.add_season_number(cube, time_coord, name='season_number')
        ccat.add_season_month_initials(cube, time_coord, name='season_months')
        ccat.add_season_year(cube, time_coord, name='year_ofseason')
        
        # also test 'generic' categorisation interface
        def _month_in_quarter(coord, pt_value):
            date = coord.units.num2date(pt_value)
            return (date.month - 1) % 3

        ccat.add_categorised_coord(cube, 'month_in_quarter', time_coord, _month_in_quarter)

        for coord_name in ['month_number', 'month_in_quarter',
                           'weekday_number', 'season_number',
                           'year_ofseason', 'year', 'day']:
            cube.coord(coord_name).points = \
                cube.coord(coord_name).points.astype(np.int64)

        # check values
        self.assertCML(cube, ('categorisation', 'quickcheck.cml'))
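
The season/season-year pair added above is the standard idiom for seasonal statistics: grouping on both coordinates keeps each winter's December with the following January and February, rather than averaging all years together. A sketch, assuming `cube` has a 'time' coordinate:

import iris
import iris.coord_categorisation as ccat

ccat.add_season(cube, 'time', name='clim_season')
ccat.add_season_year(cube, 'time', name='season_year')
seasonal_means = cube.aggregated_by(['clim_season', 'season_year'],
                                    iris.analysis.MEAN)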
Example no. 8
def climatology(cube, kind='month'):
    """Calculate a climatology for a cube.  Can do monthly or yearly.

    Args:
        cube (iris.cube.Cube)
        kind (Optional[str]): 'month' or 'year'

    Returns:
        iris.cube.Cube

    """
    aux_coords = [aux_coord.name() for aux_coord in cube.aux_coords]
    if 'year' not in aux_coords:
        cat.add_year(cube, 'time')
    if 'month' not in aux_coords:
        cat.add_month(cube, 'time')
        cat.add_month_number(cube, 'time')
    out = cube.aggregated_by(kind, iris.analysis.MEAN)

    # If the data don't start in January the time coordinate will no longer
    # be monotonic. Fix this.
    if (kind == 'month') and (not out.coord('time').is_monotonic()):

        # Reorder the data so January is first.
        jan_index = np.where(out.coord('month').points == 'Jan')[0][0]
        ntim = 12
        sort_indices = list(range(jan_index, ntim)) + list(range(0, jan_index))
        out = out[sort_indices]

        # Create a new time coordinate which is monotonic.
        startyear = int(out.coord('time').units.num2date(0).year)
        newtime_points = [netcdftime.datetime(startyear + (m // 12), (m % 12) + 1, 1)
                          for m in out.coord('month_number').points.astype(int) - 1]
        time_units = out.coord('time').units
        newtime_points = time_units.date2num(newtime_points)
        newtime = iris.coords.DimCoord(newtime_points, units=time_units,
                                       standard_name='time')
        data_dim = out.coord_dims('time')[0]
        out.remove_coord('time')
        out.add_dim_coord(newtime, data_dim)

    return out
Example no. 9
    def test_basic(self):
        cube = self.cube
        time_coord = self.time_coord

        ccat.add_year(cube, time_coord, "my_year")
        ccat.add_day_of_month(cube, time_coord, "my_day_of_month")
        ccat.add_day_of_year(cube, time_coord, "my_day_of_year")

        ccat.add_month(cube, time_coord, "my_month")
        ccat.add_month_fullname(cube, time_coord, "my_month_fullname")
        ccat.add_month_number(cube, time_coord, "my_month_number")

        ccat.add_weekday(cube, time_coord, "my_weekday")
        ccat.add_weekday_number(cube, time_coord, "my_weekday_number")
        ccat.add_weekday_fullname(cube, time_coord, "my_weekday_fullname")

        ccat.add_season(cube, time_coord, "my_season")
        ccat.add_season_number(cube, time_coord, "my_season_number")
        ccat.add_season_year(cube, time_coord, "my_season_year")

        # also test 'generic' categorisation interface
        def _month_in_quarter(coord, pt_value):
            date = coord.units.num2date(pt_value)
            return (date.month - 1) % 3

        ccat.add_categorised_coord(
            cube, "my_month_in_quarter", time_coord, _month_in_quarter
        )

        # To ensure consistent results between 32-bit and 64-bit
        # platforms, ensure all the numeric categorisation coordinates
        # are always stored as int64.
        for coord in cube.coords():
            if coord.long_name is not None and coord.points.dtype.kind == "i":
                coord.points = coord.points.astype(np.int64)

        # check values
        self.assertCML(cube, ("categorisation", "quickcheck.cml"))
Example no. 10
def get_niwa_obs(o3_file, conv_fact=1.0e6, monthly_mean=True):
    """ Reads NIWA data and returns as monthly means, if requested """

    # o3_file - expects NetCDF data
    niwao3, = iris.load(o3_file)

    # Apply any conversion requested - default: mole/mole (volume) to ppmv.
    niwao3.data = niwao3.data * conv_fact

    if monthly_mean:

        # Collapse data to multi-annual monthly mean
        # Add 'month' aux coord and tag records by months
        #  for easier meaning over the years.
        #  --verified with explicit meaning at sample pts
        import iris.coord_categorisation as ircat

        ircat.add_month(niwao3, 'time', name='month')
        niwao3m = niwao3.aggregated_by('month', iris.analysis.MEAN)
        niwao3m.rename(niwao3.name())
        return niwao3m
    else:
        return niwao3
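
A usage sketch (hypothetical file name); with the default monthly_mean=True the returned cube has one time step per calendar month:

niwa_mm = get_niwa_obs('niwa_o3_column.nc')   # hypothetical NetCDF file
print(niwa_mm.coord('month').points)          # 12 multi-annual monthly means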
Example no. 11
# Create a VectorWind instance to handle the computations.
w = VectorWind(uwnd, vwnd)

# Compute components of Rossby wave source: absolute vorticity, divergence,
# irrotational (divergent) wind components, gradients of absolute vorticity.
eta = w.absolutevorticity()
div = w.divergence()
uchi, vchi = w.irrotationalcomponent()
etax, etay = w.gradient(eta)
etax.units = 'm**-1 s**-1'
etay.units = 'm**-1 s**-1'

# Combine the components to form the Rossby wave source term.
S = eta * -1. * div - (uchi * etax + vchi * etay)
S.coord('longitude').attributes['circular'] = True

# Pick out the field for December at 200 hPa.
time_constraint = iris.Constraint(month='Dec')
add_month(S, 'time')
S_dec = S.extract(time_constraint)

# Plot Rossby wave source.
clevs = [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30]
ax = plt.subplot(111, projection=ccrs.PlateCarree(central_longitude=180))
fill = iplt.contourf(S_dec * 1e11, clevs, cmap=plt.cm.RdBu_r, extend='both')
ax.coastlines()
ax.gridlines()
plt.colorbar(fill, orientation='horizontal')
plt.title('Rossby Wave Source ($10^{-11}$s$^{-1}$)', fontsize=16)
plt.show()
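
For reference, the expression assembled above is the Rossby wave source of Sardeshmukh and Hoskins (1988), with absolute vorticity (eta), divergence (div) and irrotational wind (uchi, vchi):

S = -\eta \, \nabla \cdot \mathbf{v}_\chi - \mathbf{v}_\chi \cdot \nabla \eta
  = -\eta D - \left( u_\chi \frac{\partial \eta}{\partial x}
                     + v_\chi \frac{\partial \eta}{\partial y} \right)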
Example no. 12
def add_time_coord_cats(cube):
    """
    This function takes in an iris cube, and adds a range of
    time-based coordinate categorisations to it. Depending
    on the data, not all of the coords added will be relevant.

    Args
    ----
    cube: iris cube that has a coordinate called 'time'

    Returns
    -------
    Cube: cube that has new time categorisation coords added

    Notes
    -----

    A simple example:

    >>> file = os.path.join(conf.DATA_DIR, 'mslp.daily.rcm.viet.nc')
    >>> cube = iris.load_cube(file)
    >>> coord_names = [coord.name() for coord in cube.coords()]
    >>> print((', '.join(coord_names)))
    time, grid_latitude, grid_longitude
    >>> ccube = add_time_coord_cats(cube)
    >>> coord_names = [coord.name() for coord in ccube.coords()]
    >>> print((', '.join(coord_names)))
    time, grid_latitude, grid_longitude, day_of_month, day_of_year, month, \
month_number, season, season_number, year
    >>> # print every 50th value of the added time cat coords
    >>> for c in coord_names[3:]:
    ...     print(ccube.coord(c).long_name)
    ...     print(ccube.coord(c).points[::50])
    ...
    day_of_month
    [ 1 21 11  1 21 11  1 21]
    day_of_year
    [  1  51 101 151 201 251 301 351]
    month
    ['Jan' 'Feb' 'Apr' 'Jun' 'Jul' 'Sep' 'Nov' 'Dec']
    month_number
    [ 1  2  4  6  7  9 11 12]
    season
    ['djf' 'djf' 'mam' 'jja' 'jja' 'son' 'son' 'djf']
    season_number
    [0 0 1 2 2 3 3 0]
    year
    [2000 2000 2000 2000 2000 2000 2000 2000]

    """

    # Most errors pop up when you try to add a coord that has
    # previously been added, or the cube doesn't contain the
    # necessary attribute, so each categoriser gets the same
    # try/except treatment.

    ccube = cube.copy()

    categorisations = [
        # numeric
        iccat.add_day_of_year,
        iccat.add_day_of_month,
        iccat.add_month_number,
        iccat.add_season_number,
        iccat.add_year,
        # strings
        iccat.add_month,
        iccat.add_season,
    ]
    for categorise in categorisations:
        try:
            categorise(ccube, "time")
        except (AttributeError, ValueError) as err:
            print("add_time_coord_cats: {}, skipping . . . ".format(err))

    return ccube
Example no. 13
with warnings.catch_warnings():
    warnings.simplefilter('ignore', UserWarning)
    uwnd = iris.load_cube(example_data_path('uwnd_mean.nc'))
    vwnd = iris.load_cube(example_data_path('vwnd_mean.nc'))
uwnd.coord('longitude').circular = True
vwnd.coord('longitude').circular = True

# Create a VectorWind instance to handle the computation of streamfunction and
# velocity potential.
w = VectorWind(uwnd, vwnd)

# Compute the streamfunction and velocity potential.
sf, vp = w.sfvp()

# Pick out the field for December.
time_constraint = iris.Constraint(month='Dec')
add_month(sf, 'time', name='month')
add_month(vp, 'time', name='month')
sf_dec = sf.extract(time_constraint)
vp_dec = vp.extract(time_constraint)

# Plot streamfunction.
clevs = [-120, -100, -80, -60, -40, -20, 0, 20, 40, 60, 80, 100, 120]
ax = plt.subplot(111, projection=ccrs.PlateCarree(central_longitude=180))
fill_sf = iplt.contourf(sf_dec * 1e-06, clevs, cmap=plt.cm.RdBu_r,
                        extend='both')
ax.coastlines()
ax.gridlines()
plt.colorbar(fill_sf, orientation='horizontal')
plt.title('Streamfunction ($10^6$m$^2$s$^{-1}$)', fontsize=16)

# Plot velocity potential.
Example no. 14
def main():

    cube = myload()

    icat.add_month(cube, 'time')
    months = cube.coord('month')

    # Set the limits for the loop over time steps.
    minTime = 0
    maxTime = 90

    for time in range(minTime, maxTime):

        # Set up for larger image.
        figSize = [12, 6]
        fig = plt.figure(figsize=figSize, dpi=200)
        rect = 0, 0, 200 * figSize[0], 200 * figSize[1]
        fig.add_axes(rect)
        geo_axes = plt.axes(projection=ccrs.PlateCarree())

        # We need to fix the boundary of the figure (otherwise we get a black border at left & top).
        # Cartopy removes matplotlib's axes.patch (which normally defines the boundary) and
        # replaces it with outline_patch and background_patch.  It's the former which is causing
        # the black border.  Get the axis object and make its outline patch invisible.
        geo_axes.outline_patch.set_visible(False)
        plt.margins(0, 0)
        fig.subplots_adjust(left=0, right=1, bottom=0, top=1)

        # Contour plot the data and add the coastline.
        iplt.contourf(cube[time],
                      levels=(0.000008, 0.00002, 0.00007, 0.0002, 0.001, 0.05),
                      colors=('cyan', 'blue', 'yellow', 'orange', 'red'))

        #-6.4358826, 27.94899
        plt.gca().coastlines()
        #plt.colorbar(boundaries = (-6, -3, 0, 4, 8, 12, 16, 20, 25), values = (-6, -3, 0, 4, 8, 12, 16, 20))

        # Extract the month value and display it (coordinates used in locating
        # the text are those of the data).
        month = months[time].points[0]

        # Display the month on both sides of the display.
        plt.text(-110,
                 0,
                 month,
                 horizontalalignment='center',
                 verticalalignment='top',
                 size='large',
                 fontdict={'family': 'monospace'})
        plt.text(70,
                 0,
                 month,
                 horizontalalignment='center',
                 verticalalignment='top',
                 size='large',
                 fontdict={'family': 'monospace'})

        # Now save the plot in an image file.  The files are numbered
        # sequentially, starting from image-0000.png; this is so that the
        # ffmpeg command can grok them.
        filename = "rainfall/image-%04d.png" % time
        #        plt.savefig(filename, bbox_inches='tight', pad_inches=0)
        plt.savefig(filename, dpi=200)

        # Discard the figure (otherwise the text will be overwritten
        # by the next iteration).
        plt.close()

    print("images made! Now converting to .mp4...")
    create_video()
    print("Opening video...")
Example no. 15
def mainfunc(run):
    """Main function in stratospheric assessment code."""
    metrics = dict()

    # Set up to only run for 10 year period (eventually)
    year_cons = dict(from_dt=run['from_monthly'], to_dt=run['to_monthly'])

    # Read zonal mean U (lbproc=192) and add month number to metadata
    ucube = load_run_ss(
        run, 'monthly', 'eastward_wind', lbproc=192, **year_cons)
    # Although input data is a zonal mean, iris does not recognise it as such
    # and just reads it as having a single longitudinal coordinate. This
    # removes longitude as a dimension coordinate and makes it a scalar
    # coordinate in line with how a zonal mean would be described.
    # Is there a better way of doing this?
    ucube_cds = [cdt.standard_name for cdt in ucube.coords()]
    if 'longitude' in ucube_cds:
        ucube = ucube.collapsed('longitude', iris.analysis.MEAN)
    if not ucube.coord('latitude').has_bounds():
        ucube.coord('latitude').guess_bounds()
    # check for month_number
    aux_coord_names = [aux_coord.var_name for aux_coord in ucube.aux_coords]
    if 'month_number' not in aux_coord_names:
        icc.add_month_number(ucube, 'time', name='month_number')

    # Read zonal mean T (lbproc=192) and add clim month and season to metadata
    tcube = load_run_ss(
        run, 'monthly', 'air_temperature', lbproc=192,
        **year_cons)  # m01s30i204
    # Although input data is a zonal mean, iris does not recognise it as such
    # and just reads it as having a single longitudinal coordinate. This
    # removes longitude as a dimension coordinate and makes it a scalar
    # coordinate in line with how a zonal mean would be described.
    # Is there a better way of doing this?
    tcube_cds = [cdt.standard_name for cdt in tcube.coords()]
    if 'longitude' in tcube_cds:
        tcube = tcube.collapsed('longitude', iris.analysis.MEAN)
    if not tcube.coord('latitude').has_bounds():
        tcube.coord('latitude').guess_bounds()
    aux_coord_names = [aux_coord.var_name for aux_coord in tcube.aux_coords]
    if 'month' not in aux_coord_names:
        icc.add_month(tcube, 'time', name='month')
    if 'clim_season' not in aux_coord_names:
        icc.add_season(tcube, 'time', name='clim_season')

    # Read zonal mean q (lbproc=192) and add clim month and season to metadata
    qcube = load_run_ss(
        run, 'monthly', 'specific_humidity', lbproc=192,
        **year_cons)  # m01s30i205
    # Although input data is a zonal mean, iris does not recognise it as such
    # and just reads it as having a single longitudinal coordinate. This
    # removes longitude as a dimension coordinate and makes it a scalar
    # coordinate in line with how a zonal mean would be described.
    # Is there a better way of doing this?
    qcube_cds = [cdt.standard_name for cdt in qcube.coords()]
    if 'longitude' in qcube_cds:
        qcube = qcube.collapsed('longitude', iris.analysis.MEAN)
    if not qcube.coord('latitude').has_bounds():
        qcube.coord('latitude').guess_bounds()
    aux_coord_names = [aux_coord.var_name for aux_coord in qcube.aux_coords]
    if 'month' not in aux_coord_names:
        icc.add_month(qcube, 'time', name='month')
    if 'clim_season' not in aux_coord_names:
        icc.add_season(qcube, 'time', name='clim_season')

    # Calculate PNJ metrics
    pnj_metrics(run, ucube, metrics)

    # Calculate QBO metrics
    qbo_metrics(run, ucube, metrics)

    # Calculate polar temperature metrics
    tpole_metrics(run, tcube, metrics)

    # Calculate equatorial temperature metrics
    teq_metrics(run, tcube, metrics)

    # Calculate tropical temperature metrics
    t_metrics(run, tcube, metrics)

    # Calculate tropical water vapour metric
    q_metrics(run, qcube, metrics)

    # Summary metric
    summary_metric(metrics)

    # Make sure all metrics are of type float
    # Need at the moment to populate metrics files
    for key, value in metrics.items():
        metrics[key] = float(value)

    return metrics
Example no. 16
def main():

    gppcube = myload('gpp')
    chlrphylcube = myload('chlrphyl')
    #data = np.add(gppcube.data, chlrphylcube.data)
    
    gppLbound = np.amin(gppcube.data)
    gppUbound = np.amax(gppcube.data)



    chlrphylLbound = np.amin(chlrphylcube.data)
    chlrphylUbound = np.amax(chlrphylcube.data)
    print(gppLbound, gppUbound)
    print(chlrphylLbound, chlrphylUbound)
       
    icat.add_month(gppcube, 'time')
    months = gppcube.coord('month')
    
    # Set the limits for the loop over time steps.
    minTime = 0
    maxTime = 360
    
    fileIndex = 0
    for time in range(minTime, maxTime):

        # Avoid timesteps that are broken - ie, cause a stop with this error:
        # ValueError: A LinearRing must have at least 3 coordinate tuples
        if time in [12, 310, 320, 347, 351, 354]:
            continue

        # Set up for larger image.
        figSize = [12, 6]
        fig = plt.figure(figsize=figSize, dpi=200)
        rect = 0, 0, 200 * figSize[0], 200 * figSize[1]
        fig.add_axes(rect)
        geo_axes = plt.axes(projection=ccrs.PlateCarree())

        # We need to fix the boundary of the figure (otherwise we get a black border at left & top).
        # Cartopy removes matplotlib's axes.patch (which normally defines the boundary) and
        # replaces it with outline_patch and background_patch.  It's the former which is causing
        # the black border.  Get the axis object and make its outline patch invisible.
        geo_axes.outline_patch.set_visible(False)
        plt.margins(0, 0)
        fig.subplots_adjust(left=0, right=1, bottom=0, top=1)

        # Contour plot chlorophyll and GPP and add the coastline.

        iplt.contourf(chlrphylcube[time], vmin=chlrphylLbound,
                      vmax=chlrphylUbound, cmap='BuGn')
        iplt.contourf(gppcube[time], vmin=gppLbound, vmax=gppUbound,
                      cmap='YlGn')
        
        #-6.4358826, 27.94899
        plt.gca().coastlines()
        #plt.colorbar(boundaries = (-6, -3, 0, 4, 8, 12, 16, 20, 25), values = (-6, -3, 0, 4, 8, 12, 16, 20))
        # Make the outline patch of the current axes invisible as well.
        ax = plt.gca()
        ax.outline_patch.set_visible(False)

        # Extract the month value and display it (coordinates used in locating
        # the text are those of the data).
        month = months[time].points[0]

        # Display the month on both sides of the display.
        plt.text(-110, 0, month, horizontalalignment='center',
                 verticalalignment='top', size='large',
                 fontdict={'family': 'monospace'})
        plt.text(70, 0, month, horizontalalignment='center',
                 verticalalignment='top', size='large',
                 fontdict={'family': 'monospace'})
       
        # Now save the plot in an image file.  The files are numbered
        # sequentially, starting from image-0000.png; this is so that the
        # ffmpeg command can grok them.
        filename = "gpp_chlrphyl_movie/image-%04d.png" % fileIndex
        fileIndex += 1
#        plt.savefig(filename, bbox_inches='tight', pad_inches=0)
        plt.savefig(filename, dpi=200)
        
        # Discard the figure (otherwise the text will be overwritten
        # by the next iteration).
        plt.close()
       
    print("images made! Now converting to .mp4...")
    create_video()
    print("Opening video...")
Example no. 17
def read_pr_sm_topo(project_info, model):

    """
    ;; Arguments
    ;;    project_info: dictionary
    ;;          all info from namelist
    ;;
    ;; Return
    ;;    pr: iris cube [time, lat, lon]
    ;;          precipitation time series
    ;;    sm: iris cube [time, lat, lon]
    ;;          soil moisture time series
    ;;    topo: array [lat, lon]
    ;;          topography
    ;;    lon: array [lon]
    ;;          longitude
    ;;    lat: array [lat]
    ;;          latitude
    ;;    time: iris cube coords
    ;;          time info of cube
    ;;    time_bnds_1: float
    ;;          first time_bnd of time series
    ;;
    ;;
    ;; Description
    ;;    Read cmip5 input data for computing the diagnostic
    ;;
    """
    
    import projects
    E = ESMValProject(project_info)
    verbosity = E.get_verbosity()
    #-------------------------
    # Read model info
    #-------------------------

    currProject = getattr(vars()['projects'], model.split_entries()[0])()

    model_info = model.split_entries()

    mip = currProject.get_model_mip(model)
    exp = currProject.get_model_exp(model)
    start_year = currProject.get_model_start_year(model)
    end_year = currProject.get_model_end_year(model)

    years = range(int(start_year), int(end_year) + 1)
    
    '''
    #-------------------------
    # Read model info
    #-------------------------

    model_name = model_info[1]
    time_step = model_info[2]
    exp_fam = model_info[3]
    model_run = model_info[4]
    year_start = model_info[5]
    year_end = model_info[6]
    filedir = model_info[7]

    years = range(int(year_start), int(year_end)+1)
    '''

    
    #-------------------------
    # Input data directories
    #-------------------------
    currDiag = project_info['RUNTIME']['currDiag']

    pr_index = currDiag.get_variables().index('pr')
    pr_field = currDiag.get_field_types()[pr_index]

    sm_index = currDiag.get_variables().index('mrsos')
    sm_field = currDiag.get_field_types()[sm_index]

    indir = currProject.get_cf_outpath(project_info, model)
    in_file = currProject.get_cf_outfile(project_info, model, pr_field, 'pr', mip, exp)
    pr_files = [os.path.join(indir, in_file)]

    in_file = currProject.get_cf_outfile(project_info, model, sm_field, 'mrsos', mip, exp)
    sm_files = [os.path.join(indir, in_file)]
    
    '''
    #-------------------------
    # Input data directories
    #-------------------------
    pr_files = []
    sm_files = []

    for yy in years:

        Patt = filedir+'pr_'+time_step+'_'+model_name+'_'+exp_fam+'_'+\
               model_run+'_'+str(yy)+'*.nc'
        pr_files.append(glob.glob(Patt))

        Patt = filedir+'mrsos_'+time_step+'_'+model_name+'_'+exp_fam+'_'+\
                model_run+'_'+str(yy)+'*.nc'
        sm_files.append(glob.glob(Patt))

    pr_files = [l[0] for l in pr_files if len(l)>0]
    pr_files = sorted(pr_files)

    sm_files = [l[0] for l in sm_files if len(l)>0]
    sm_files = sorted(sm_files)
    '''

    #----------------------
    # Read in precipitation
    #----------------------

    pr_list = []

    for pr_file in pr_files:

        info('Reading precipitation from ' + pr_file, verbosity, required_verbosity=1)

        pr = iris.load(pr_file)[0]

        for at_k in list(pr.attributes.keys()):
            pr.attributes.pop(at_k)

        pr_list.append(pr)

    pr = iris.cube.CubeList(pr_list)
    pr = pr.concatenate()[0]

    # Convert longitude from 0_360 to -180_180

    pr = coord_change([pr])[0]

    # Add metadata: day, month, year

    add_month(pr, 'time')
    add_day_of_month(pr, 'time', name='dom')
    add_year(pr, 'time')

    # Convert units to kg m-2 hr-1

    pr.convert_units('kg m-2 hr-1')

    #-----------------------
    # Read in soil moisture
    #-----------------------

    sm_list = []

    for sm_file in sm_files:

        info('Reading soil moisture from ' + sm_file, verbosity, required_verbosity=1)

        sm = iris.load(sm_file)[0]

        for at_k in list(sm.attributes.keys()):
            sm.attributes.pop(at_k)

        sm_list.append(sm)

    sm = iris.cube.CubeList(sm_list)
    sm = sm.concatenate()[0]

    # Convert longitude from 0_360 to -180_180

    sm = coord_change([sm])[0]

    # Add metadata: day, month, year

    add_month(sm, 'time')
    add_day_of_month(sm, 'time', name='dom')
    add_year(sm, 'time')

    #----------------------------------------------
    # Constrain pr and sm data to latitude 60S_60N
    #----------------------------------------------

    latconstraint = iris.Constraint(latitude=lambda cell: -59.0 <= cell <= 59.0)

    pr = pr.extract(latconstraint)
    sm = sm.extract(latconstraint)

    #---------------------------------------------------
    # Read in grid info: latitude, longitude, timestamp
    #---------------------------------------------------

    lon = sm.coords('longitude')[0].points
    lat = sm.coords('latitude')[0].points
    time = sm.coords('time')

    # --------------------------------------
    # Convert missing data (if any) to -999.
    # --------------------------------------

    try:
        sm.data.set_fill_value(-999)
        sm.data.data[sm.data.mask] = -999.

    except AttributeError:
        # Data is not a masked array, so there is nothing to convert.
        info('no missing data conversion', verbosity, required_verbosity=1)

    #----------------------
    # Read in topography
    #----------------------

    # Topography map specs:
    # latitude 60S_60N
    # longitude 180W_180E
    # model resolution

    #ftopo = currProject.get_cf_fx_file(project_info, model)

    #dt = '>f4'
    #topo = (np.fromfile(ftopo, dtype=dt)).reshape(len(lat), len(lon))

    topo = get_topo(project_info, lon, lat, model)

    #----------------------
    # Read in time bounds
    #----------------------

    indir, infiles = currProject.get_cf_infile(project_info, model, pr_field, 'pr', mip, exp)
    Patt = os.path.join(indir, infiles)
    pr_files = sorted(glob.glob(Patt))

    ncf = nc4.Dataset(pr_files[0])
    time_bnds_1 = ncf.variables['time_bnds'][0][0]
    time_bnds_1 = time_bnds_1 - int(time_bnds_1)
    ncf.close()

    #-----------------------------------------------
    # Return input data to compute sm_pr diagnostic
    #-----------------------------------------------
    return pr, sm, topo, lon, lat, time, time_bnds_1
Example no. 19
# Create a VectorWind instance to handle the computations.
w = VectorWind(uwnd, vwnd)

# Compute components of Rossby wave source: absolute vorticity, divergence,
# irrotational (divergent) wind components, gradients of absolute vorticity.
eta = w.absolutevorticity()
div = w.divergence()
uchi, vchi = w.irrotationalcomponent()
etax, etay = w.gradient(eta)
etax.units = "m**-1 s**-1"
etay.units = "m**-1 s**-1"

# Combine the components to form the Rossby wave source term.
S = eta * -1.0 * div - (uchi * etax + vchi * etay)
S.coord("longitude").attributes["circular"] = True

# Pick out the field for December at 200 hPa.
time_constraint = iris.Constraint(month="Dec")
add_month(S, "time")
S_dec = S.extract(time_constraint)

# Plot Rossby wave source.
clevs = [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30]
ax = plt.subplot(111, projection=ccrs.PlateCarree(central_longitude=180))
fill = iplt.contourf(S_dec * 1e11, clevs, cmap=plt.cm.RdBu_r, extend="both")
ax.coastlines()
ax.gridlines()
plt.colorbar(fill, orientation="horizontal")
plt.title("Rossby Wave Source ($10^{-11}$s$^{-1}$)", fontsize=16)
plt.show()
Example no. 20
def read_pr_sm_topo(filedir, years):
    """ 
    ;; Arguments
    ;;    filedir: dir
    ;;          directory with input data
    ;;    years: list of int
    ;;          list of years for the analysis
    ;;
    ;; Return 
    ;;    pr: iris cube [time, lat, lon]
    ;;          precipitation time series
    ;;    sm: iris cube [time, lat, lon]
    ;;          soil moisture time series
    ;;    topo: array [lat, lon]
    ;;          topography
    ;;    lon: array [lon]
    ;;          longitude
    ;;    lat: array [lat]
    ;;          latitude
    ;;    time: iris cube coords
    ;;          time info of cube
    ;;    
    ;;
    ;; Description
    ;;    Read cmip5 input data for computing the diagnostic
    ;;
    """

    #-------------------------
    # Input data directories
    #-------------------------

    Patt = filedir + 'pr_3hr_inmcm4_amip_r1i1p1_{}010101-{}123122.nc'
    pr_files = [Patt.format(y, y) for y in years]

    Patt = filedir + 'mrsos_3hr_inmcm4_amip_r1i1p1_{}010100-{}123121.nc'
    sm_files = [Patt.format(y, y) for y in years]

    #----------------------
    # Read in precipitation
    #----------------------

    pr_list = []

    for pr_file in pr_files:

        print('Reading precipitation from ' + pr_file)

        pr = iris.load(pr_file)[0]

        for at_k in list(pr.attributes.keys()):
            pr.attributes.pop(at_k)

        pr_list.append(pr)

    pr = iris.cube.CubeList(pr_list)
    pr = pr.concatenate()[0]

    # Convert longitude from 0_360 to -180_180

    pr = coord_change([pr])[0]

    # Add metadata: day, month, year

    add_month(pr, 'time')
    add_day_of_month(pr, 'time', name='dom')
    add_year(pr, 'time')

    # Convert units to kg m-2 hr-1

    pr.convert_units('kg m-2 hr-1')

    #-----------------------
    # Read in soil moisture
    #-----------------------

    sm_list = []

    for sm_file in sm_files:

        print('Reading soil moisture from ' + sm_file)

        sm = iris.load(sm_file)[0]

        for at_k in list(sm.attributes.keys()):
            sm.attributes.pop(at_k)

        sm_list.append(sm)

    sm = iris.cube.CubeList(sm_list)
    sm = sm.concatenate()[0]

    # Convert longitude from 0_360 to -180_180

    sm = coord_change([sm])[0]

    # Add metadata: day, month, year

    add_month(sm, 'time')
    add_day_of_month(sm, 'time', name='dom')
    add_year(sm, 'time')

    #----------------------------------------------
    # Constrain pr and sm data to latitude 60S_60N
    #----------------------------------------------

    latconstraint = iris.Constraint(
        latitude=lambda cell: -59.0 <= cell <= 59.0)

    pr = pr.extract(latconstraint)
    sm = sm.extract(latconstraint)

    #---------------------------------------------------
    # Read in grid info: latitude, longitude, timestamp
    #---------------------------------------------------

    lon = sm.coords('longitude')[0].points
    lat = sm.coords('latitude')[0].points
    time = sm.coords('time')

    # --------------------------------------
    # Convert missing data (if any) to -999.
    # --------------------------------------

    try:
        sm.data.set_fill_value(-999)
        sm.data.data[sm.data.mask] = -999.

    except AttributeError:
        # Data is not a masked array, so there is nothing to convert.
        print('no missing data conversion')

    #----------------------
    # Read in topography
    #----------------------

    # Topography map specs:
    # latitude 60S_60N
    # longitude 180W_180E
    # model resolution

    ftopo = filedir + 'topo_var_5x5_inmcm4.gra'
    dt = '>f4'
    topo = (np.fromfile(ftopo, dtype=dt)).reshape(len(lat), len(lon))

    #-----------------------------------------------
    # Return input data to compute sm_pr diagnostic
    #-----------------------------------------------
    return pr, sm, topo, lon, lat, time
Example no. 21
with warnings.catch_warnings():
    warnings.simplefilter('ignore', UserWarning)
    uwnd = iris.load_cube(example_data_path('uwnd_mean.nc'))
    vwnd = iris.load_cube(example_data_path('vwnd_mean.nc'))
uwnd.coord('longitude').circular = True
vwnd.coord('longitude').circular = True

# Create a VectorWind instance to handle the computation of streamfunction and
# velocity potential.
w = VectorWind(uwnd, vwnd)

# Compute the streamfunction and velocity potential.
sf, vp = w.sfvp()

# Pick out the field for December.
time_constraint = iris.Constraint(month='Dec')
add_month(sf, 'time', name='month')
add_month(vp, 'time', name='month')
sf_dec = sf.extract(time_constraint)
vp_dec = vp.extract(time_constraint)

# Plot streamfunction.
clevs = [-120, -100, -80, -60, -40, -20, 0, 20, 40, 60, 80, 100, 120]
ax = plt.subplot(111, projection=ccrs.PlateCarree(central_longitude=180))
fill_sf = iplt.contourf(sf_dec * 1e-06,
                        clevs,
                        cmap=plt.cm.RdBu_r,
                        extend='both')
ax.coastlines()
ax.gridlines()
plt.colorbar(fill_sf, orientation='horizontal')
plt.title('Streamfunction ($10^6$m$^2$s$^{-1}$)', fontsize=16)
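
For reference, sfvp() returns the streamfunction (psi) and velocity potential (chi) of the Helmholtz decomposition of the horizontal wind:

\mathbf{v} = \mathbf{k} \times \nabla \psi + \nabla \chi ,
\qquad \zeta = \nabla^{2} \psi ,
\qquad D = \nabla^{2} \chi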