Example #1
def seasonal_mean(cube):
    seasons = ['winter', 'summer']
    icc.add_season(cube, 'time', 'clim_season')
    icc.add_season_year(cube, 'time', 'season_year')
    season_mean_cube_list = iris.cube.CubeList([])
    season_max_cube_list = iris.cube.CubeList([])
    season_min_cube_list = iris.cube.CubeList([])
    for season in seasons:
        if season == 'winter':
            months = 'djf'
        elif season == 'spring':
            months = 'mam'
        elif season == 'summer':
            months = 'jja'
        elif season == 'autumn':
            months = 'son'
        single_season_cube = cube.extract(iris.Constraint(clim_season=months))
        season_mean_cube = single_season_cube.aggregated_by(
            ['clim_season', 'season_year'], iris.analysis.MEAN)
        season_mean_cube.rename(season_mean_cube.name() + '_' + season +
                                '_mean')
        season_mean_cube_list.append(season_mean_cube)
        season_max = single_season_cube.aggregated_by(
            ['clim_season', 'season_year'], iris.analysis.MAX)
        season_max.rename(season_max.name() + '_' + season + '_max')
        season_max_cube_list.append(season_max)
        season_min = single_season_cube.aggregated_by(
            ['clim_season', 'season_year'], iris.analysis.MIN)
        season_min.rename(season_min.name() + '_' + season + '_min')
        season_min_cube_list.append(season_min)
    return [season_mean_cube_list, season_max_cube_list, season_min_cube_list]
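A minimal usage sketch for the function above, assuming the example's module-level imports (iris and iris.coord_categorisation as icc); the input file name is hypothetical:

import iris
import iris.coord_categorisation as icc  # the `icc` alias used above

cube = iris.load_cube('tas_monthly.nc')  # hypothetical monthly time series
mean_cubes, max_cubes, min_cubes = seasonal_mean(cube)
for seasonal in mean_cubes:
    print(seasonal.name(), seasonal.shape)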
Example #2
def add_times(cube, time):
    coord_cat.add_month(cube, time, name='month')
    coord_cat.add_season(cube, time, name='clim_season')
    coord_cat.add_year(cube, time, name='year')
    coord_cat.add_day_of_year(cube, time, name='day_number')
    coord_cat.add_season_year(cube, time, name='season_year')
    return cube
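A short usage sketch for the helper above, assuming coord_cat is iris.coord_categorisation; the input file name is hypothetical:

import iris
import iris.coord_categorisation as coord_cat  # the `coord_cat` alias used above

cube = iris.load_cube('precip_monthly.nc')  # hypothetical monthly data
cube = add_times(cube, 'time')
# the cube now also carries month, clim_season, year, day_number and season_year
print([coord.name() for coord in cube.coords()])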
    def test_add_custom_season(self):
        # custom seasons match standard seasons?
        seasons = ('djf', 'mam', 'jja', 'son')
        ccat.add_season(self.cube, 'time', name='season_std')
        ccat.add_custom_season(self.cube, 'time', seasons,
                               name='season_custom')
        coord_std = self.cube.coord('season_std')
        coord_custom = self.cube.coord('season_custom')
        self.assertArrayEqual(coord_custom.points, coord_std.points)
Example #4
    def test_add_season_membership(self):
        # season membership identifies correct seasons?
        season = "djf"
        ccat.add_season_membership(self.cube, "time", season,
                                   name="in_season")
        ccat.add_season(self.cube, "time")
        coord_season = self.cube.coord("season")
        coord_membership = self.cube.coord("in_season")
        season_locations = np.where(coord_season.points == season)[0]
        membership_locations = np.where(coord_membership.points)[0]
        self.assertArrayEqual(membership_locations, season_locations)
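Outside the unit test, the boolean 'in_season' coordinate created by add_season_membership can drive an extraction directly. A sketch, with a hypothetical input file:

import iris
import iris.coord_categorisation as ccat

cube = iris.load_cube('tas_monthly.nc')  # hypothetical monthly data
ccat.add_season_membership(cube, 'time', 'djf', name='in_season')
djf_only = cube.extract(iris.Constraint(in_season=True))  # keep only DJF timesteps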
Example #6
    def test_add_season_nonstandard(self):
        # season categorisations work for non-standard seasons?
        cube = self.cube
        time_coord = self.time_coord
        seasons = ['djfm', 'amjj', 'ason']
        ccat.add_season(cube, time_coord, name='seasons', seasons=seasons)
        ccat.add_season_number(cube, time_coord, name='season_numbers',
                               seasons=seasons)
        ccat.add_season_year(cube, time_coord, name='season_years',
                             seasons=seasons)
        self.assertCML(cube, ('categorisation', 'customcheck.cml'))
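The same seasons keyword works outside the test suite; a sketch that aggregates a hypothetical monthly cube into three four-month seasons:

import iris
import iris.analysis
import iris.coord_categorisation as ccat

cube = iris.load_cube('tas_monthly.nc')  # hypothetical monthly data
seasons = ('djfm', 'amjj', 'ason')
ccat.add_season(cube, 'time', name='season', seasons=seasons)
ccat.add_season_year(cube, 'time', name='season_year', seasons=seasons)
season_means = cube.aggregated_by(['season', 'season_year'],
                                  iris.analysis.MEAN)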
def select_certain_months(cubes, lbmon):
    """
    Select data from a CubeList that matches the specified months.

    :param CubeList cubes: Iris CubeList.
    :param list lbmon: List of month numbers, e.g. lbmon=[5, 6, 7] for May,
        June, and July.
    :returns: CubeList with cubes containing only data for the specified
        months.
    :rtype: CubeList
    :raises: `AssertionError` if `cubes` is not an `iris.cube.CubeList`.
    """
    # add 'month number' coordinate (only the 'monthly' entry is used below)
    add_time_coord = {
        'monthly':
        lambda cube: coord_cat.add_month_number(
            cube, 'time', name='month_number'),
        'seasonal':
        lambda cube: coord_cat.add_season(cube, 'time', name='clim_season'),
        'annual':
        lambda cube: coord_cat.add_season_year(
            cube, 'time', name='season_year')
    }
    assert isinstance(cubes, iris.cube.CubeList)

    for cube in cubes:
        add_time_coord['monthly'](cube)

    # filter by month number
    month_constraint = iris.Constraint(month_number=lbmon)
    return cubes.extract(
        month_constraint)  # CubeList.extract always returns a CubeList
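A short usage sketch for select_certain_months, assuming coord_cat is iris.coord_categorisation (as in the function above) and a hypothetical set of monthly files:

import iris

cubes = iris.load('monthly_*.nc')  # hypothetical input files
jja_cubes = select_certain_months(cubes, [6, 7, 8])
print(jja_cubes)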
    def test_basic(self):
        cube = self.cube
        time_coord = self.time_coord

        ccat.add_year(cube, time_coord, 'my_year')
        ccat.add_day_of_month(cube, time_coord, 'my_day_of_month')
        ccat.add_day_of_year(cube, time_coord, 'my_day_of_year')

        ccat.add_month(cube, time_coord, 'my_month')
        with warnings.catch_warnings(record=True):
            ccat.add_month_shortname(cube, time_coord, 'my_month_shortname')
        ccat.add_month_fullname(cube, time_coord, 'my_month_fullname')
        ccat.add_month_number(cube, time_coord, 'my_month_number')

        ccat.add_weekday(cube, time_coord, 'my_weekday')
        ccat.add_weekday_number(cube, time_coord, 'my_weekday_number')
        with warnings.catch_warnings(record=True):
            ccat.add_weekday_shortname(cube, time_coord,
                                       'my_weekday_shortname')
        ccat.add_weekday_fullname(cube, time_coord, 'my_weekday_fullname')

        ccat.add_season(cube, time_coord, 'my_season')
        ccat.add_season_number(cube, time_coord, 'my_season_number')
        with warnings.catch_warnings(record=True):
            ccat.add_season_month_initials(cube, time_coord,
                                           'my_season_month_initials')
        ccat.add_season_year(cube, time_coord, 'my_season_year')

        # also test 'generic' categorisation interface
        def _month_in_quarter(coord, pt_value):
            date = coord.units.num2date(pt_value)
            return (date.month - 1) % 3

        ccat.add_categorised_coord(cube,
                                   'my_month_in_quarter',
                                   time_coord,
                                   _month_in_quarter)

        # To ensure consistent results between 32-bit and 64-bit
        # platforms, ensure all the numeric categorisation coordinates
        # are always stored as int64.
        for coord in cube.coords():
            if coord.long_name is not None and coord.points.dtype.kind == 'i':
                coord.points = coord.points.astype(np.int64)

        # check values
        self.assertCML(cube, ('categorisation', 'quickcheck.cml'))
    def test_basic(self):
        # make a series of 'day numbers' for the time, that slide across
        # month boundaries
        day_numbers = np.arange(0, 600, 27, dtype=np.int32)

        cube = iris.cube.Cube(day_numbers, long_name='test cube',
                              units='metres')

        # use day numbers as data values also (don't actually use this for
        # anything)
        cube.data = day_numbers

        time_coord = iris.coords.DimCoord(
            day_numbers, standard_name='time',
            units=iris.unit.Unit('days since epoch', 'gregorian'))
        cube.add_dim_coord(time_coord, 0)

        # add test coordinates for examples wanted
        ccat.add_year(cube, time_coord)
        # NB test passing coord-name instead of coord itself
        ccat.add_day_of_month(cube, 'time')

        ccat.add_month(cube, time_coord)
        ccat.add_month_shortname(cube, time_coord, name='month_short')
        ccat.add_month_fullname(cube, time_coord, name='month_full')
        ccat.add_month_number(cube, time_coord, name='month_number')

        ccat.add_weekday(cube, time_coord)
        ccat.add_weekday_number(cube, time_coord, name='weekday_number')
        ccat.add_weekday_shortname(cube, time_coord, name='weekday_short')
        ccat.add_weekday_fullname(cube, time_coord, name='weekday_full')

        ccat.add_season(cube, time_coord)
        ccat.add_season_number(cube, time_coord, name='season_number')
        ccat.add_season_month_initials(cube, time_coord, name='season_months')
        ccat.add_season_year(cube, time_coord, name='year_ofseason')

        # also test 'generic' categorisation interface
        def _month_in_quarter(coord, pt_value):
            date = coord.units.num2date(pt_value)
            return (date.month - 1) % 3

        ccat.add_categorised_coord(cube, 'month_in_quarter', time_coord,
                                   _month_in_quarter)

        # store the numeric categorisation coords as int64 for consistency
        # between 32-bit and 64-bit platforms
        for coord_name in ['month_number', 'month_in_quarter',
                           'weekday_number', 'season_number',
                           'year_ofseason', 'year', 'day']:
            points = cube.coord(coord_name).points
            cube.coord(coord_name).points = points.astype(np.int64)

        # check values
        self.assertCML(cube, ('categorisation', 'quickcheck.cml'))
def seasonal_mean(mycube):
    """
    Compute seasonal means with MEAN.

    Chunks time into 3-month periods, computes the mean over each period
    and returns a cube.
    """
    if not mycube.coords('clim_season'):
        coord_cat.add_season(mycube, 'time', name='clim_season')
    if not mycube.coords('season_year'):
        coord_cat.add_season_year(mycube, 'time', name='season_year')
    annual_seasonal_mean = mycube.aggregated_by(
        ['clim_season', 'season_year'], iris.analysis.MEAN)

    def spans_three_months(time):
        """Check for three months."""
        return (time.bound[1] - time.bound[0]) == 90  # days

    three_months_bound = iris.Constraint(time=spans_three_months)
    return annual_seasonal_mean.extract(three_months_bound)
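A minimal usage sketch (the file name is hypothetical, and coord_cat is assumed to be iris.coord_categorisation). Note that spans_three_months compares the width of the time bounds with exactly 90 days, so extract() can return None when no aggregated season matches:

import iris
import iris.analysis
import iris.coord_categorisation as coord_cat

cube = iris.load_cube('tas_monthly.nc')  # hypothetical monthly data
season_means = seasonal_mean(cube)
if season_means is not None:
    print(season_means.coord('clim_season').points)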
Example #14
    def test_basic(self):
        cube = self.cube
        time_coord = self.time_coord

        ccat.add_year(cube, time_coord, "my_year")
        ccat.add_day_of_month(cube, time_coord, "my_day_of_month")
        ccat.add_day_of_year(cube, time_coord, "my_day_of_year")

        ccat.add_month(cube, time_coord, "my_month")
        ccat.add_month_fullname(cube, time_coord, "my_month_fullname")
        ccat.add_month_number(cube, time_coord, "my_month_number")

        ccat.add_weekday(cube, time_coord, "my_weekday")
        ccat.add_weekday_number(cube, time_coord, "my_weekday_number")
        ccat.add_weekday_fullname(cube, time_coord, "my_weekday_fullname")

        ccat.add_season(cube, time_coord, "my_season")
        ccat.add_season_number(cube, time_coord, "my_season_number")
        ccat.add_season_year(cube, time_coord, "my_season_year")

        # also test 'generic' categorisation interface
        def _month_in_quarter(coord, pt_value):
            date = coord.units.num2date(pt_value)
            return (date.month - 1) % 3

        ccat.add_categorised_coord(
            cube, "my_month_in_quarter", time_coord, _month_in_quarter
        )

        # To ensure consistent results between 32-bit and 64-bit
        # platforms, ensure all the numeric categorisation coordinates
        # are always stored as int64.
        for coord in cube.coords():
            if coord.long_name is not None and coord.points.dtype.kind == "i":
                coord.points = coord.points.astype(np.int64)

        # check values
        self.assertCML(cube, ("categorisation", "quickcheck.cml"))
def plot_vector_fields(u_path="",
                       v_path="",
                       u_name="vozocrtx",
                       v_name="vomecrty",
                       level=0):
    name_constraint = iris.Constraint(
        cube_func=lambda c: c.var_name == u_name or c.var_name == v_name)

    u_cube = iris.load_cube(u_path, constraint=name_constraint)
    u_cube = u_cube.extract(
        iris.Constraint(model_level_number=u_cube.coord(
            "model_level_number").points[level]))

    v_cube = iris.load_cube(v_path, constraint=name_constraint)
    v_cube = v_cube.extract(
        iris.Constraint(model_level_number=v_cube.coord(
            "model_level_number").points[level]))
    assert isinstance(u_cube, Cube)

    # calculate seasonal means
    coord_categorisation.add_season(u_cube, "time")
    coord_categorisation.add_season(v_cube, "time")

    u_cube_seasonal = u_cube.aggregated_by("season", analysis.MEAN)
    v_cube_seasonal = v_cube.aggregated_by("season", analysis.MEAN)

    # plot results
    b, lons, lats = nemo_commons.get_basemap_and_coordinates_from_file(
        T_FILE_PATH, resolution="h")
    x, y = b(lons, lats)
    the_mask = nemo_commons.get_mask(
        path=os.path.join(EXP_DIR, "bathy_meter.nc"))

    levels = np.arange(0, 0.11, 0.01)
    bn = BoundaryNorm(levels, len(levels) - 1)
    cmap = cm.get_cmap("Accent", len(levels) - 1)

    for season in u_cube_seasonal.coord("season").points:
        print(season)
        fig = plt.figure(figsize=(8, 4))

        ax = fig.add_subplot(111)
        ax.set_title(season.upper())
        u = u_cube_seasonal.extract(iris.Constraint(season=season)).data
        v = v_cube_seasonal.extract(iris.Constraint(season=season)).data

        u = np.ma.masked_where(~the_mask, u)
        v = np.ma.masked_where(~the_mask, v)

        speed = np.sqrt(u**2 + v**2)
        cs = b.pcolormesh(x,
                          y,
                          speed,
                          norm=bn,
                          vmin=levels[0],
                          vmax=levels[-1],
                          cmap=cmap)
        b.colorbar(cs)

        u, v = b.rotate_vector(u, v, lons, lats)
        q = b.quiver(x, y, u, v, scale=1.5, width=0.002)

        qk = plt.quiverkey(q, 0.15, 0.1, 0.05, '0.05 m/s', labelpos='W')
        b.drawcoastlines(linewidth=cpp.COASTLINE_WIDTH)

        fname = "{0}-{1}_{2}.jpeg".format(u_cube_seasonal.var_name,
                                          v_cube_seasonal.var_name, season)

        if not os.path.isdir(NEMO_IMAGES_DIR):
            os.mkdir(NEMO_IMAGES_DIR)
        fig.tight_layout()
        fig.savefig(os.path.join(NEMO_IMAGES_DIR, fname), dpi=cpp.FIG_SAVE_DPI)

    # plot annual mean
    fig = plt.figure()
    ax = fig.add_subplot(111)
    fname = "{0}-{1}_{2}.jpeg".format(u_cube_seasonal.var_name,
                                      v_cube_seasonal.var_name, "annual")
    u_annual = u_cube_seasonal.collapsed("season", analysis.MEAN).data
    v_annual = v_cube_seasonal.collapsed("season", analysis.MEAN).data

    u_annual = np.ma.masked_where(~the_mask, u_annual)
    v_annual = np.ma.masked_where(~the_mask, v_annual)

    fig.suptitle("Annual")
    q = b.quiver(x, y, u_annual, v_annual, scale=1.5, width=0.002, zorder=5)
    qk = plt.quiverkey(q, 0.15, 0.1, 0.05, '0.05 m/s', labelpos='W')

    levels = np.arange(0, 0.15, 0.01)
    bn = BoundaryNorm(levels, len(levels) - 1)
    #cmap = my_colormaps.get_cmap_from_ncl_spec_file("colormap_files/wgne15.rgb", len(levels) - 1)
    cmap = cm.get_cmap("Paired", len(levels) - 1)
    cs = b.pcolormesh(x,
                      y,
                      np.sqrt(u_annual**2 + v_annual**2),
                      cmap=cmap,
                      norm=bn)
    b.drawcoastlines(linewidth=cpp.COASTLINE_WIDTH)
    b.colorbar(cs)
    fig.tight_layout()
    fig.savefig(os.path.join(NEMO_IMAGES_DIR, fname), dpi=cpp.FIG_SAVE_DPI)
Example #16
def add_time_coord_cats(cube):
    """
    Take an iris cube and add a range of time-based coordinate
    categorisations to it. Depending on the data, not all of the added
    coords will be relevant.

    Args
    ----
    cube: iris cube that has a coordinate called 'time'

    Returns
    -------
    Cube: cube that has new time categorisation coords added

    Notes
    -----
    A simple example:

    >>> file = os.path.join(conf.DATA_DIR, 'mslp.daily.rcm.viet.nc')
    >>> cube = iris.load_cube(file)
    >>> coord_names = [coord.name() for coord in cube.coords()]
    >>> print((', '.join(coord_names)))
    time, grid_latitude, grid_longitude
    >>> ccube = add_time_coord_cats(cube)
    >>> coord_names = [coord.name() for coord in ccube.coords()]
    >>> print((', '.join(coord_names)))
    time, grid_latitude, grid_longitude, day_of_month, day_of_year, month, \
month_number, season, season_number, year
    >>> # print every 50th value of the added time cat coords
    ... for c in coord_names[3:]:
    ...     print(ccube.coord(c).long_name)
    ...     print(ccube.coord(c).points[::50])
    ...
    day_of_month
    [ 1 21 11  1 21 11  1 21]
    day_of_year
    [  1  51 101 151 201 251 301 351]
    month
    ['Jan' 'Feb' 'Apr' 'Jun' 'Jul' 'Sep' 'Nov' 'Dec']
    month_number
    [ 1  2  4  6  7  9 11 12]
    season
    ['djf' 'djf' 'mam' 'jja' 'jja' 'son' 'son' 'djf']
    season_number
    [0 0 1 2 2 3 3 0]
    year
    [2000 2000 2000 2000 2000 2000 2000 2000]

    """

    # most errors pop up when you try to add a coord that has
    # previously been added, or the cube doesn't contain the
    # necessary attribute.

    ccube = cube.copy()

    # numeric
    try:
        iccat.add_day_of_year(ccube, "time")
    except (AttributeError, ValueError) as err:
        print("add_time_coord_cats: {}, skipping . . . ".format(err))
    try:
        iccat.add_day_of_month(ccube, "time")
    except (AttributeError, ValueError) as err:
        print("add_time_coord_cats: {}, skipping . . . ".format(err))
    try:
        iccat.add_month_number(ccube, "time")
    except (AttributeError, ValueError) as err:
        print("add_time_coord_cats: {}, skipping . . . ".format(err))
    try:
        iccat.add_season_number(ccube, "time")
    except (AttributeError, ValueError) as err:
        print("add_time_coord_cats: {}, skipping . . . ".format(err))
    try:
        iccat.add_year(ccube, "time")
    except (AttributeError, ValueError) as err:
        print("add_time_coord_cats: {}, skipping . . . ".format(err))
    # strings
    try:
        iccat.add_month(ccube, "time")
    except (AttributeError, ValueError) as err:
        print("add_time_coord_cats: {}, skipping . . . ".format(err))
    try:
        iccat.add_season(ccube, "time")
    except (AttributeError, ValueError) as err:
        print("add_time_coord_cats: {}, skipping . . . ".format(err))

    return ccube
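The repeated try/except blocks above could equally be driven from a list of the categorisation functions; a compact sketch of the same idea, not the function's actual implementation:

import iris.coord_categorisation as iccat


def add_time_coord_cats_compact(cube):
    """Loop-based sketch equivalent to add_time_coord_cats."""
    ccube = cube.copy()
    categorisers = [
        iccat.add_day_of_year, iccat.add_day_of_month,
        iccat.add_month_number, iccat.add_season_number,
        iccat.add_year, iccat.add_month, iccat.add_season,
    ]
    for add_cat in categorisers:
        try:
            add_cat(ccube, "time")
        except (AttributeError, ValueError) as err:
            print("add_time_coord_cats: {}, skipping . . .".format(err))
    return ccube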
Example #17
def mainfunc(run):
    """Main function in stratospheric assessment code."""
    metrics = dict()

    # Set up to only run for 10 year period (eventually)
    year_cons = dict(from_dt=run['from_monthly'], to_dt=run['to_monthly'])

    # Read zonal mean U (lbproc=192) and add month number to metadata
    ucube = load_run_ss(
        run, 'monthly', 'eastward_wind', lbproc=192, **year_cons)
    # Although input data is a zonal mean, iris does not recognise it as such
    # and just reads it as having a single longitudinal coordinate. This
    # removes longitude as a dimension coordinate and makes it a scalar
    # coordinate in line with how a zonal mean would be described.
    # Is there a better way of doing this?
    ucube_cds = [cdt.standard_name for cdt in ucube.coords()]
    if 'longitude' in ucube_cds:
        ucube = ucube.collapsed('longitude', iris.analysis.MEAN)
    if not ucube.coord('latitude').has_bounds():
        ucube.coord('latitude').guess_bounds()
    # check for month_number
    aux_coord_names = [aux_coord.var_name for aux_coord in ucube.aux_coords]
    if 'month_number' not in aux_coord_names:
        icc.add_month_number(ucube, 'time', name='month_number')

    # Read zonal mean T (lbproc=192) and add clim month and season to metadata
    tcube = load_run_ss(
        run, 'monthly', 'air_temperature', lbproc=192,
        **year_cons)  # m01s30i204
    # Although input data is a zonal mean, iris does not recognise it as such
    # and just reads it as having a single longitudinal coordinate. This
    # removes longitude as a dimension coordinate and makes it a scalar
    # coordinate in line with how a zonal mean would be described.
    # Is there a better way of doing this?
    tcube_cds = [cdt.standard_name for cdt in tcube.coords()]
    if 'longitude' in tcube_cds:
        tcube = tcube.collapsed('longitude', iris.analysis.MEAN)
    if not tcube.coord('latitude').has_bounds():
        tcube.coord('latitude').guess_bounds()
    aux_coord_names = [aux_coord.var_name for aux_coord in tcube.aux_coords]
    if 'month' not in aux_coord_names:
        icc.add_month(tcube, 'time', name='month')
    if 'clim_season' not in aux_coord_names:
        icc.add_season(tcube, 'time', name='clim_season')

    # Read zonal mean q (lbproc=192) and add clim month and season to metadata
    qcube = load_run_ss(
        run, 'monthly', 'specific_humidity', lbproc=192,
        **year_cons)  # m01s30i205
    # Although input data is a zonal mean, iris does not recognise it as such
    # and just reads it as having a single longitudinal coordinate. This
    # removes longitude as a dimension coordinate and makes it a scalar
    # coordinate in line with how a zonal mean would be described.
    # Is there a better way of doing this?
    qcube_cds = [cdt.standard_name for cdt in qcube.coords()]
    if 'longitude' in qcube_cds:
        qcube = qcube.collapsed('longitude', iris.analysis.MEAN)
    if not qcube.coord('latitude').has_bounds():
        qcube.coord('latitude').guess_bounds()
    aux_coord_names = [aux_coord.var_name for aux_coord in qcube.aux_coords]
    if 'month' not in aux_coord_names:
        icc.add_month(qcube, 'time', name='month')
    if 'clim_season' not in aux_coord_names:
        icc.add_season(qcube, 'time', name='clim_season')

    # Calculate PNJ metrics
    pnj_metrics(run, ucube, metrics)

    # Calculate QBO metrics
    qbo_metrics(run, ucube, metrics)

    # Calculate polar temperature metrics
    tpole_metrics(run, tcube, metrics)

    # Calculate equatorial temperature metrics
    teq_metrics(run, tcube, metrics)

    # Calculate tropical temperature metrics
    t_metrics(run, tcube, metrics)

    # Calculate tropical water vapour metric
    q_metrics(run, qcube, metrics)

    # Summary metric
    summary_metric(metrics)

    # Make sure all metrics are of type float
    # Need at the moment to populate metrics files
    for key, value in metrics.items():
        metrics[key] = float(value)

    return metrics
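The zonal-mean handling that the comments above describe (collapsing a degenerate longitude dimension to a scalar coordinate) can be shown in isolation; a sketch with a hypothetical zonal-mean file:

import iris
import iris.analysis

cube = iris.load_cube('u_zonal_mean.nc')  # hypothetical zonal-mean field
if 'longitude' in [c.standard_name for c in cube.coords()]:
    # collapsing over the single longitude point makes it a scalar coordinate
    cube = cube.collapsed('longitude', iris.analysis.MEAN)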
def derive_variability(cube, runid, lon_range_pdo, lon_range_amo, savedir,
                       remove_global_means, neofs, period_title=''):
    sst_cube = cube.copy()
    # mask the sea-ice regions
    sst_cube = mask_ice(sst_cube)

    sst_cube_monthanom, tmp_avg = remove_monthly_avg(sst_cube)
    tpi_ts = tpi_timeseries(sst_cube_monthanom)
    tpi_smooth = tpi_ts['tpi'].copy()
    smooth_data = utilities_mr.smooth(tpi_ts['tpi'].data, window_len=156,
                                      window='hanning')
    print(len(smooth_data))
    tpi_smooth.data[:len(smooth_data)] = smooth_data

    file_out = os.path.join(savedir, runid + '_tpi_timeseries.nc')
    tpi_ts['tpi'].long_name = 'TPI timeseries'
    iris.save(tpi_ts['tpi'], file_out, netcdf_format="NETCDF3_CLASSIC")

    try:
        icc.add_season(sst_cube, 'time', name='clim_season')
        icc.add_season_year(sst_cube, 'time', name='season_year')
    except ValueError:
        # season coordinates are already present on the cube
        pass

    sst_djf = sst_cube.extract(DJF_constraint)
    nino_timeseries = nino34_timeseries(sst_djf)
    nino_timeseries.long_name = 'NINO3.4 timeseries'
    file_out = os.path.join(
        savedir, runid + '_nino34_timeseries' + period_title + '.nc')
    iris.io.save(nino_timeseries, file_out, netcdf_format="NETCDF3_CLASSIC")

    # calculate annual means
    sst_ann = sst_cube.aggregated_by('year', iris.analysis.MEAN)

    # mask out land when using surface temperature
    if remove_global_means:
        sst_ann = remove_glob_avg(sst_ann)

    # calculate PDO EOFs, timeseries and regression
    pdo_pc = calculate_pdo_eof_timeseries(runid, sst_ann, savedir,
                                          lat_range_pdo, lon_range_pdo,
                                          neofs, remove_global_means,
                                          period_title)

    # calculate PDO SST regression and write file
    pltdir = os.path.join(savedir, 'pdo_patterns/plts/gc2/')
    if not os.path.exists(pltdir):
        os.makedirs(pltdir)
    pdofile = (runid + '_pdo_eof1_pc_time_series_glob_mean_sst_removed' +
               period_title + '.nc')
    pdo_cube = iris.load(savedir + pdofile)[0]
    calc_regression(sst_ann, pltdir, pdo_cube, runid, 'PDO', period_title)

    # calculate AMO timeseries
    amo_timeseries = calc_amo_timeseries(sst_ann, runid, lon_range_amo,
                                         lat_range_amo)
    amo_timeseries.long_name = 'AMO timeseries'
    file_out = os.path.join(
        savedir, runid + '_amo_timeseries_trenberth' + period_title + '.nc')
    iris.io.save(amo_timeseries, file_out, netcdf_format="NETCDF3_CLASSIC")

    # calculate AMO regression and write files
    amodir = os.path.join(savedir, 'amo')
    if not os.path.exists(amodir):
        os.makedirs(amodir)
    calculate_amo_regression(sst_ann, amo_timeseries, lon_range_amo,
                             lat_range_amo, amodir, runid, period_title)

    resol_model = ['HadISST']
    title = 'HadISST'
    desc_model = ['Nino3.4', 'AMO', 'PDO', 'TPI']
    ymin = [-3, -0.3, -0.2, -1.]
    ymax = [3, 0.3, 0.2, 1.]
    fig = plt.figure(figsize=(8, 11), dpi=100)
    for i, ts in enumerate([nino_timeseries, amo_timeseries, pdo_pc,
                            tpi_ts['tpi']]):
        subpl = fig.add_subplot(4, 1, i + 1)
        period = 'year'
        if i == 3:
            period = 'month'
        plot_timeseries(i, runid, ts, resol_model[0], desc_model[i], title,
                        subpl, ymin[i], ymax[i], period=period)
    plt.savefig(os.path.join(
        savedir, runid + '_modes_timeseries' + period_title + '.png'))
    plt.show()
    del sst_cube
def plot_cross_section_for_seasons(data_path="",
                                   i_start=0,
                                   j_start=0,
                                   i_end=-1,
                                   j_end=-1,
                                   var_name=None):

    name_constraint = iris.Constraint(
        cube_func=lambda c: c.var_name == var_name)
    data_cube = iris.load_cube(data_path, constraint=name_constraint)

    fig = plt.figure()
    impath = os.path.join(
        NEMO_IMAGES_DIR,
        "vert_sect_{0}_{1}_{2}_{3}_{4}.jpeg".format(var_name, i_start, j_start,
                                                    i_end, j_end))

    # add a season coordinate for aggregation
    # coord_categorisation.add_month_number(cube, "time")
    coord_categorisation.add_season(data_cube, "time")

    cube_seasonal = data_cube.aggregated_by("season", analysis.MEAN)

    nplots = cube_seasonal.shape[0]
    ncols = 2
    nrows = nplots // ncols if nplots % ncols == 0 else nplots // ncols + 1

    b, lons, lats = nemo_commons.get_basemap_and_coordinates_from_file(
        T_FILE_PATH, resolution="i")

    print("lons shape: ", lons.shape)

    gs = gridspec.GridSpec(ncols=ncols + 1,
                           nrows=nrows + 1,
                           width_ratios=[1, 1, 0.05],
                           hspace=0.3)  # +1 for the colorbar and for map

    bath_path = os.path.join(EXP_DIR, "bathy_meter.nc")
    the_mask = nemo_commons.get_mask(path=bath_path)

    depths = Dataset(T_FILE_PATH).variables["deptht"][:]
    bathymetry = Dataset(bath_path).variables["Bathymetry"][:]

    vert_mask = np.ma.masked_all(depths.shape + bathymetry.shape)
    for lev, di in enumerate(depths):
        not_mask_j, not_mask_i = np.where(di < bathymetry * 0.95)
        vert_mask[lev, not_mask_j, not_mask_i] = 1

    lons_sel, lats_sel = None, None
    depths_2d = None
    vmin = None
    vmax = None
    nx, ny = None, None
    dists_2d = None
    season_to_section = {}
    for i, season in zip(list(range(nplots)),
                         cube_seasonal.coord("season").points):

        data = cube_seasonal.extract(
            iris.Constraint(season=season)).data.squeeze()

        assert data.ndim == 3
        _, ny, nx = data.shape

        if i_end == -1:
            i_end = nx - 1
        if j_end == -1:
            j_end = ny - 1

        j_mask, i_mask = np.where(~the_mask)
        print("mask shape: ", the_mask.shape)
        print("data shape: ", data.shape)
        data[:, j_mask, i_mask] = np.ma.masked

        i_list, j_list = get_section_hor_indices(i_start=i_start,
                                                 i_end=i_end,
                                                 j_start=j_start,
                                                 j_end=j_end)

        data_sel = data[:, j_list, i_list]
        data_sel = np.ma.masked_where(vert_mask[:, j_list, i_list].mask,
                                      data_sel)
        print("data_sel shape: ", data_sel.shape)
        if lons_sel is None:
            lons_sel = lons[j_list, i_list]
            lats_sel = lats[j_list, i_list]

            p_start = (lats[j_start, i_start], lons[j_start, i_start])
            dists = [
                GreatCircleDistance(p_start, (the_lat, the_lon)).km
                for the_lat, the_lon in zip(lats_sel, lons_sel)
            ]

            dists_2d, depths_2d = np.meshgrid(dists, depths)

        season_to_section[season] = data_sel

        if vmin is None:
            vmin = data_sel.min()
        else:
            vmin = min(vmin, data_sel.min())

        if vmax is None:
            vmax = data_sel.max()
        else:
            vmax = max(vmax, data_sel.max())

    delta = 1.0
    clevs = np.arange(np.floor(vmin), vmax + delta, delta)
    cmap = cm.get_cmap("jet", len(clevs) - 1)
    if clevs is not None:
        bn = BoundaryNorm(clevs, len(clevs) - 1)
    else:
        bn = None

    print("{0}: ".format(var_name), vmin, vmax)
    cs = None
    ax = None
    for i, season in zip(list(range(nplots)),
                         cube_seasonal.coord("season").points):
        print(season)
        row = i // ncols
        col = i % ncols
        ax = fig.add_subplot(gs[row, col])

        ax.set_title(season.upper())
        data = season_to_section[season]
        print(data.min(), data.max())

        #to_plot = np.ma.masked_where(~the_mask, data)
        #print to_plot.min(), to_plot.max()

        #cs = ax.pcolormesh(dists_2d, depths_2d, data, norm = bn, cmap = cmap)
        cs = ax.contourf(dists_2d, depths_2d, data, levels=clevs, cmap=cmap)
        #cs = ax.pcolormesh(dists_2d, depths_2d, data, norm = bn, cmap = cmap)
        #b.drawcoastlines(linewidth=cpp.COASTLINE_WIDTH)
        #b.drawparallels(np.arange(-90, 90, 2))
        #b.drawmeridians(np.arange(-180, 180, 2))

        if col != 0:
            ax.yaxis.set_ticks([])

        ax.xaxis.set_ticks([])

        assert isinstance(ax, Axes)
        ax.invert_yaxis()
        ax.set_ylim(80, 0)  # disregard areas deeper than 80 m
        if row == 0 and col == 0:
            ax.set_ylabel("Depth (m)", fontdict={"fontsize": 20})
    cb = plt.colorbar(cs,
                      ticks=clevs[::2],
                      cax=fig.add_subplot(gs[:nrows, ncols]))

    ax = fig.add_subplot(gs[nrows, :])
    x, y = b(lons, lats)

    b.drawcoastlines(linewidth=cpp.COASTLINE_WIDTH)
    b.fillcontinents()

    assert isinstance(ax, Axes)
    ax.add_line(
        Line2D([x[j_start, i_start], x[j_end, i_end]],
               [y[j_start, i_start], y[j_end, i_end]],
               linewidth=3))

    fig.savefig(impath, bbox_inches="tight")

Example #21
def draw_seasonal_means_panel(path="", var_name="sosstsst"):
    cube = iris.load_cube(
        path,
        constraint=iris.Constraint(cube_func=lambda c: c.var_name == var_name))
    assert isinstance(cube, Cube)

    # add a season coordinate for aggregation
    # coord_categorisation.add_month_number(cube, "time")
    coord_categorisation.add_season(cube, "time")

    cube_seasonal = cube.aggregated_by("season", analysis.MEAN)

    print(cube_seasonal.shape)

    # plot results
    fig = plt.figure(figsize=(7, 4))
    # fig.suptitle(cube.name() + " ({0})".format(cube.units))
    nplots = cube_seasonal.shape[0]
    ncols = 2
    nrows = nplots // ncols if nplots % ncols == 0 else nplots // ncols + 1

    b, lons, lats = nemo_commons.get_basemap_and_coordinates_from_file(
        T_FILE_PATH, resolution="i")
    x, y = b(lons, lats)
    gs = gridspec.GridSpec(ncols=ncols + 1,
                           nrows=nrows,
                           width_ratios=[1, 1, 0.05],
                           wspace=0)  # +1 for the colorbar
    the_mask = nemo_commons.get_mask(
        path=os.path.join(EXP_DIR, "bathy_meter.nc"))

    vmin = None
    vmax = None

    for i, season in zip(list(range(nplots)),
                         cube_seasonal.coord("season").points):

        data = cube_seasonal.extract(iris.Constraint(season=season)).data
        the_min = data[the_mask].min()
        the_max = np.percentile(data[the_mask], 95)

        if vmin is None:
            vmin, vmax = the_min, the_max
        else:
            vmin = min(the_min, vmin)
            vmax = max(the_max, vmax)

    print("{0}: ".format(var_name), vmin, vmax)
    cs = None
    for i, season in zip(list(range(nplots)),
                         cube_seasonal.coord("season").points):
        print(season)
        row = i // ncols
        col = i % ncols
        ax = fig.add_subplot(gs[row, col])
        ax.set_title(season.upper())
        data = cube_seasonal.extract(iris.Constraint(season=season)).data

        # plot only the upper level of the 3D field, if given
        if data.ndim > 2:
            if data.shape[1:] == x.shape:
                data = data[0, :, :]
            else:
                data = data[:, :, 0]

        to_plot = np.ma.masked_where(~the_mask, data)
        print(to_plot.min(), to_plot.max())

        cs = b.pcolormesh(x,
                          y,
                          to_plot,
                          ax=ax,
                          vmin=vmin,
                          vmax=vmax,
                          cmap=cm.get_cmap("jet", 20))
        b.drawcoastlines(linewidth=cpp.COASTLINE_WIDTH)
        b.drawparallels(np.arange(-90, 90, 2))
        b.drawmeridians(np.arange(-180, 180, 2))

    plt.colorbar(cs, cax=fig.add_subplot(gs[:, ncols]))

    fname = "{0}_{1}.jpeg".format(
        cube_seasonal.var_name, "-".join(cube_seasonal.coord("season").points))
    if not os.path.isdir(NEMO_IMAGES_DIR):
        os.mkdir(NEMO_IMAGES_DIR)
    fig.tight_layout()
    fig.savefig(os.path.join(NEMO_IMAGES_DIR, fname), dpi=cpp.FIG_SAVE_DPI)