def testSetPlotContourPlain(self):
    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
    plt.subplot(1, 2, 1)
    cc.setPlot(cube, "Contour", "Automatic", 15, False, None, None)
    plt.subplot(1, 2, 2)
    qplt.contour(cube, 15)
    plt.show()
Example #2
    def test_contour(self):
        qplt.contour(self._small())
        self.check_graphic()

        qplt.contourf(self._small(),
                      coords=["model_level_number", "grid_longitude"])
        self.check_graphic()
Example #3
    def test_map(self):
        cube = self._slice(['grid_latitude', 'grid_longitude'])
        qplt.contour(cube)
        self.check_graphic()

        # Check that adding 360 to the longitude points gives an *almost* identical result.
        lon = cube.coord('grid_longitude')
        lon.points = lon.points + 360
        qplt.contour(cube)
        self.check_graphic()
Example #5
File: azeq.py  Project: pp-mo/azeq
def simpletest(do_savefig=True, do_showfig=True, savefig_file='./puffer.png'):
    figure = make_puffersphere_figure()
    axes = make_puffersphere_axes()
    axes.stock_img()
    data = istk.global_pp()
    axes.coastlines()
    qplt.contour(data)
    draw_gridlines()
    #axes.coastlines()
    if do_savefig:
        save_figure_for_puffersphere(figure=plt.gcf(), filename=savefig_file)
    if do_showfig:
        plt.show()
def main():
    data_dir = '/data/local2/hador/ostia_reanalysis/'  # ELD140
    filename = data_dir + '*.nc'

    cube = iris.load_cube(filename, 'sea_surface_temperature', callback=my_callback)
    # Reads in data using a special callback, because it is a nasty netCDF file.

    sst_mean = cube.collapsed('time', iris.analysis.MEAN)
    #average all 12 months together

    caribbean = iris.Constraint(
                                    longitude=lambda v: 260 <= v <= 320,
                                    latitude=lambda v: 0 <= v <= 40,
                                    name='sea_surface_temperature'
                                    )

    caribbean_sst_mean = sst_mean.extract(caribbean)
    #extract the Caribbean region
    
    plt.figure()
    qplt.contourf(caribbean_sst_mean, 50)
    contour = qplt.contour(caribbean_sst_mean, 5, colors='k')
    plt.clabel(contour, inline=1, fontsize=10, fmt='%1.1f')
    plt.gca().coastlines()
    #plt.gca().set_extent((-100,-60,0,40))
    plt.show()
def testSetPlotContourLargeRange(self):
    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
    plt.subplot(1, 2, 1)
    cc.setPlot(cube, "Contour", "brewer_Blues_09", 15, True, 400, 200)
    plt.subplot(1, 2, 2)
    contours = qplt.contour(cube, 15, cmap="brewer_Blues_09", vmin=200, vmax=400)
    plt.clabel(contours, inline=1, fontsize=8)
    plt.show()
def add_sea_floor(cube):
    """
    Add a simple sea floor line from the cube mask.

    Parameters
    ----------
    cube: iris.cube.Cube
        Input cube to use to produce the sea floor.

    """
    land_cube = cube.copy()
    land_cube.data = np.ma.array(land_cube.data)
    mask = 1. * land_cube.data.mask
    if mask.shape == ():
        mask = np.zeros_like(land_cube.data)
    land_cube.data = np.ma.masked_where(mask == 0, mask)
    land_cube.data.mask = mask
    qplt.contour(land_cube, 2, cmap='Greys_r', rasterized=True)
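
A minimal usage sketch for add_sea_floor (not part of the original source; the file name is hypothetical): the helper is meant to be called after the main transect field has been drawn, so the grey sea-floor line is overlaid on top of it.

import iris
import iris.quickplot as qplt
import matplotlib.pyplot as plt

# Hypothetical transect cube in which land/bathymetry points are masked.
transect = iris.load_cube('transect_thetao.nc')

qplt.contourf(transect, 25)   # draw the main field first
add_sea_floor(transect)       # then overlay the sea floor derived from the mask
plt.show()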
Example #9
def set_plot(cube, plot_type, cmap, num_contours, contour_labels,
             colorbar_range):
    """
    Produces a plot object for the desired cube using quickplot.

    Args:

    * cube
        The cube to be plotted.

    * plot_type
        String holding the type of plot to be used. Choose from
        Filled Contour, Contour and pcolormesh.

    * cmap
        String representing the colormap to be used. Can be any of the
        Brewer Colormaps supported by Iris, or Automatic.

    * num_contours
        int holding the number of contours to be plotted.

    * contour_labels
        Boolean representing whether the contours on a Contour plot
        (not contourf) should be labeled.

    * colorbar_range
        Dictionary containing ints representing the max and min to
        which the colorbar will be set.

    """
    # We unpack the colorbar_range dictionary
    colorbar_max = colorbar_range['max']
    colorbar_min = colorbar_range['min']
    # We obtain the levels used to define the contours.
    levels = get_levels(cube, colorbar_max, colorbar_min, num_contours)

    if plot_type == "Filled Contour":
        qplt.contourf(cube,
                      num_contours,
                      cmap=get_colormap(cmap),
                      levels=levels,
                      vmax=colorbar_max,
                      vmin=colorbar_min)
    elif plot_type == "Contour":
        contours = qplt.contour(cube,
                                num_contours,
                                cmap=get_colormap(cmap),
                                levels=levels,
                                vmax=colorbar_max,
                                vmin=colorbar_min)
        if contour_labels:
            plt.clabel(contours, inline=1, fontsize=8)
    else:
        qplt.pcolormesh(cube,
                        cmap=get_colormap(cmap),
                        vmax=colorbar_max,
                        vmin=colorbar_min)
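
A hedged usage sketch for set_plot (not from the original code): get_levels and get_colormap are assumed to live in the same module, the colormap name and the 230-310 K range are purely illustrative, and the cube is the standard Iris air_temp.pp sample.

import iris
import matplotlib.pyplot as plt

cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
# colorbar_range carries the explicit min/max that set_plot unpacks.
set_plot(cube,
         plot_type="Contour",
         cmap="brewer_Blues_09",
         num_contours=15,
         contour_labels=True,
         colorbar_range={'min': 230, 'max': 310})
plt.gca().coastlines()
plt.show()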
Example #10
def plot_band_list(List, fname, lev, O3_bands, rates_in_bands):
    fig = plt.figure(figsize=(13, 18), dpi=100)
    index_grid = np.arange(9).reshape([3, 3])
    gs = gridspec.GridSpec(4, 3)
    for i in range(3):
        for j in range(3):
            plt1 = plt.subplot(gs[i, j])
            divs = List[index_grid[i, j]]

            qplt.contourf(divs, lev, cmap='bwr')
            qplt.contour(divs, levels=[0], colors='black')

            plt.title(
                str(rates_in_bands[index_grid[i, j]]) +
                ' events/year, O$_3$ Percentile ' +
                str(10 * index_grid[i, j]) + '-' +
                str(10 + 10 * index_grid[i, j]))
            plt1.set_yscale('log', basey=10, subsy=None)
            plt.ylim(5, 10000)
            plt1.invert_yaxis()
            plt.ylabel('Pressure (Pa)')
            plt.xlabel('Latitude')

    plt1 = plt.subplot(gs[3, 0])
    divs = List[9]
    qplt.contourf(divs, lev, cmap='bwr')
    qplt.contour(divs, levels=[0], colors='black')

    plt.title(
        str(rates_in_bands[9]) +
        ' events/year, O$_3$ Percentile 90-100')
    plt1.set_yscale('log', basey=10, subsy=None)
    plt.ylim(5, 10000)
    plt1.invert_yaxis()
    plt.ylabel('Pressure (Pa)')
    plt.xlabel('Latitude')
    plt.tight_layout()
    plt.show()
    fig.savefig('./figures/' + fname, dpi=200)

    return
Example #11
def plot_data(ax, z, t=360):
    ax = plt.subplot(111)

    assert z in zc.points
    assert t in time.points

    alt = iris.Constraint(**{zc.name(): z})
    ts = iris.Constraint(time=t)

    w = data['W'].extract(alt & ts)
    v = data['V'].extract(alt & ts)
    u = data['U'].extract(alt & ts)
    qc = data['QC'].extract(alt & ts)

    c = qplt.contour(qc,
                     coords=[xc.name(), yc.name()],
                     colors='grey',
                     levels=[
                         1e-3,
                     ],
                     alpha=0.8,
                     linewidths=2.5,
                     title="")
    ax.set_title("")

    levels = np.linspace(-20, 20, 201)
    #cw = qplt.contourf(w, coords=[xc.name(), yc.name()],
    #                  cmap=plt.cm.RdBu, vmin=-20., vmax=20.)
    cw = ax.contourf(xc.points,
                     yc.points,
                     w.data,
                     cmap=plt.cm.RdBu_r,
                     levels=levels,
                     extend="both")
    cb = plt.colorbar(cw, cax=cax, orientation='vertical')

    ax.set_title("")

    sl = 10
    ax.quiver(xc.points[::sl],
              yc.points[::sl],
              u.data[::sl, ::sl],
              v.data[::sl, ::sl],
              units='inches',
              scale=75,
              headwidth=2)

    hour = t // 3600
    minute = (t % 3600) / 60
    ax.set_title("Time = %02d:%02d | Alt = %5d m" % (hour, minute, z),
                 loc='left',
                 fontsize=11)
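
plot_data above relies on module-level globals (data, xc, yc, zc, time and a colorbar axes cax) that the snippet does not show. A rough setup sketch of what that context could look like; the file name and coordinate names are guesses:

import iris
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable

# Hypothetical model output holding W, V, U and QC as separate cubes.
data = {cube.name(): cube for cube in iris.load('les_output.nc')}

# Coordinates shared by the cubes (the names here are assumptions).
xc = data['W'].coord('projection_x_coordinate')
yc = data['W'].coord('projection_y_coordinate')
zc = data['W'].coord('height')
time = data['W'].coord('time')

fig, ax = plt.subplots()
# Side axes that plot_data uses for its colorbar.
cax = make_axes_locatable(ax).append_axes('right', size='3%', pad=0.05)

plot_data(ax, z=zc.points[0], t=360)
plt.show()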
Example #12
def main():
    """
    """
    cs = iris.Constraint(pressure=900)
    for i in range(1, 37):
        # Load the front variables
        cubes = files.load(datadir + '/xjjhq/xjjhq_fronts' + str(i).zfill(3) +
                           '.pp')
        loc = convert.calc('front_locator_parameter_thw', cubes)
        m1 = convert.calc('thermal_front_parameter_thw', cubes)
        m2 = convert.calc('local_frontal_gradient_thw', cubes)

        # Apply the masking criteria
        mask = np.logical_or(m1.data < 4 * 0.3e-10, m2.data < 4 * 1.35e-5)
        loc.data = np.ma.masked_where(mask, loc.data)
        loc = cs.extract(loc)

        # Plot the locating variable
        qplt.contour(loc, [0], colors='k')
        plt.gca().coastlines()
        plt.gca().gridlines()
        plt.title('Fronts at 900 hPa, T+' + str(i) + ' hours')
        plt.savefig(plotdir + 'fronts' + str(i).zfill(3) + '.png')
Example #13
def set_plot(cube, plot_type, cmap, num_contours, contour_labels, colorbar_range):
    """
    Produces a plot object for the desired cube using quickplot.

    Args:

    * cube
        The cube to be plotted.

    * plot_type
        String holding the type of plot to be used. Choose from
        Filled Contour, Contour and pcolormesh.

    * cmap
        String representing the colormap to be used. Can be any of the
        Brewer Colormaps supported by Iris, or Automatic.

    * num_contours
        int holding the number of contours to be plotted.

    * contour_labels
        Boolean representing whether the contours on a Contour plot
        (not contourf) should be labeled.

    * colorbar_range
        Dictionary containing ints representing the max and min to
        which the colorbar will be set.

    """
    # We unpack the colorbar_range dictionary
    colorbar_max = colorbar_range["max"]
    colorbar_min = colorbar_range["min"]
    # We obtain the levels used to define the contours.
    levels = get_levels(cube, colorbar_max, colorbar_min, num_contours)

    if plot_type == "Filled Contour":
        qplt.contourf(cube, num_contours, cmap=get_colormap(cmap), levels=levels, vmax=colorbar_max, vmin=colorbar_min)
    elif plot_type == "Contour":
        contours = qplt.contour(
            cube, num_contours, cmap=get_colormap(cmap), levels=levels, vmax=colorbar_max, vmin=colorbar_min
        )
        if contour_labels:
            plt.clabel(contours, inline=1, fontsize=8)
    else:
        qplt.pcolormesh(cube, cmap=get_colormap(cmap), vmax=colorbar_max, vmin=colorbar_min)
Example #15
 def test_xaxis_labels(self):
     qplt.contour(self.cube, coords=("str_coord", "bar"))
     self.assertPointsTickLabels("xaxis")
Example #17
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip)  # noqa

import matplotlib.pyplot as plt

import iris
import iris.quickplot as qplt

fname = iris.sample_data_path('air_temp.pp')
temperature_cube = iris.load_cube(fname)

# Add a contour, and put the result in a variable called contour.
contour = qplt.contour(temperature_cube)

# Add coastlines to the map created by contour.
plt.gca().coastlines()

# Add contour labels based on the contour we have just created.
plt.clabel(contour, inline=False)

plt.show()
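
An optional refinement to the script above (an illustrative sketch, not part of the original example): instead of labelling every level, matplotlib's clabel accepts a subset of levels, which keeps the map readable when many contours are drawn.

# Label only every other contour level, with the labels drawn inline.
plt.clabel(contour, contour.levels[::2], inline=True, fontsize=10, fmt='%1.0f')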
Example #18
 def test_xaxis_labels(self):
     qplt.contour(self.cube, coords=("str_coord", "bar"))
     self.assertPointsTickLabels("xaxis")
def PV_along_trajectories(folder='IOP3/T42',
                          time_string='20160922_12',
                          name='',
                          theta_min=24,
                          no_of_trajs=10,
                          plotnotmean=True):

    TrEn = load('/storage/silver/scenario/bn826011/WCB_outflow/Final/' +
                folder + '/inflow/' + time_string +
                '_3DTrajectoryEnsemble_new' + name)
    # load arbitrary set of 3D trajectories

    times = TrEn.times
    # get array containing pertinent times

    clpv = iris.load(
        '/storage/silver/NCAS-Weather/ben/nawdex/mi-ar482/' + time_string +
        '/prodm_op_gl-mn_' + time_string + '_c*_thsfcs.nc',
        'ertel_potential_vorticity')
    clpv[-1] = iris.util.new_axis(clpv[-1], 'time')
    pvcube = clpv.concatenate_cube()
    # load full 3D PV fields for corresponding case
    # could restrict this to pertinent times to save processing time

    cldt = iris.load('/storage/silver/NCAS-Weather/ben/nawdex/mi-ar482/' +
                     time_string + '/prodm_op_gl-mn_' + time_string +
                     '_b*_thsfcs.nc', 'total_minus_adv_only_theta'
                     )  #  '_c*_thsfcs_5K.nc', 'ertel_potential_vorticity')
    cldt[-1] = iris.util.new_axis(cldt[-1], 'time')
    dtcube = cldt.concatenate_cube()
    # same for diabatic heating proxy

    delta_lat = np.mean(np.diff(pvcube.coord('latitude').points[:10])) / 2
    # spacing of latitude grid
    delta_lon = np.mean(np.diff(pvcube.coord('longitude').points[:10])) / 2
    # spacing of longitude grid

    trajectory_bin = []
    for traj in TrEn:
        if abs(traj.data[0, 3] - traj.data[-1, 3]) > theta_min and min(
                traj.data[:, 3]) > 300:
            trajectory_bin.append(traj)
    # make a list of trajectories which ascend the most
    # NOTE: the data I have for some reason only goes down to 300K - possible drawback

    n = int(max(np.floor(len(trajectory_bin) / no_of_trajs), 1))
    # interval of selection based on desired number of trajectories

    for figno, trajex in enumerate(trajectory_bin[::n]):

        lat = trajex.data[:, 1]
        lon = trajex.data[:, 0]
        theta = trajex.data[:, 3]

        pvs = []
        dts = []

        for i in range(len(times)):
            lat_constraint = iris.Constraint(latitude=lambda cell: lat[
                i] - delta_lat < cell < lat[i] + delta_lat)
            lon_constraint = iris.Constraint(longitude=lambda cell: lon[
                i] - delta_lon < cell < lon[i] + delta_lon)
            time_constraint = iris.Constraint(time=times[i])
            pvs.append(
                pvcube.extract(lat_constraint & lon_constraint
                               & time_constraint))
            dts.append(
                dtcube.extract(lat_constraint & lon_constraint
                               & time_constraint))

        ### hack fix for points not being found
        ncl = []
        tcl = []
        try:
            for cube in pvs:
                if cube.ndim == 1:
                    ncl.append(cube)
                elif cube.ndim == 2:
                    ncl.append(cube[:, 0])
                else:
                    ncl.append(cube[:, 0, 0])
            ### hack fix for points not being found
            for cube in dts:
                if cube.ndim == 1:
                    tcl.append(cube)
                elif cube.ndim == 2:
                    tcl.append(cube[:, 0])
                else:
                    tcl.append(cube[:, 0, 0])
            ### hack fix for points not being found

            pvtrajcubes = iris.cube.CubeList(ncl)
            dttrajcubes = iris.cube.CubeList(tcl)

            pvmerge = pvtrajcubes.merge_cube()
            dtmerge = dttrajcubes.merge_cube()

            if plotnotmean:

                plt.figure(figsize=(12, 12))
                plt.subplot(2, 2, 1)
                qplt.contourf(pvmerge, np.linspace(-3, 3, 25), cmap='RdBu_r')
                plt.plot(times, theta)
                plt.subplot(2, 2, 2)
                qplt.contourf(dtmerge, np.linspace(-25, 25, 26), cmap='RdBu_r')
                plt.plot(times, theta)
                plt.subplot(2, 1, 2)
                qplt.contour(pvcube[14, 10], [2])
                plt.gca().coastlines()
                plt.plot(lon - 360, lat, linewidth=3)
                plt.savefig('PV_dtheta_trajectory_crosssection_' + str(figno) +
                            '_' + time_string + '.png')
                plt.show()
        except AttributeError as e:
            print(e)

        else:

            if figno == 0:
                pvarray = np.array([pvmerge.data])
                dtarray = np.array([dtmerge.data])
                thetarray = np.array([theta])
                # for the first profile, initialise a numpy array
            else:
                pvarray = np.append(pvarray, [pvmerge.data], axis=0)
                dtarray = np.append(dtarray, [dtmerge.data], axis=0)
                thetarray = np.append(thetarray, [theta], axis=0)

    if not plotnotmean:

        lts = len(times)

        pvmean = np.mean(pvarray, axis=0)
        dtmean = np.mean(dtarray, axis=0)
        thetamean = np.mean(thetarray, axis=0)
        # create mean fields along trajectories

        ytheta = np.repeat([np.linspace(300, 340, 17)], lts, axis=0)
        xtime = np.repeat([np.linspace(0, (lts - 1) * 6, lts)], 17, axis=0).T
        # create arrays for axes

        plt.figure(figsize=(12, 8))
        plt.subplot(1, 2, 1)
        plt.contourf(xtime,
                     ytheta,
                     pvmean,
                     np.linspace(-3, 3, 25),
                     cmap='RdBu_r')
        plt.plot(np.linspace((lts - 1) * 6, 0, lts), thetamean)
        plt.title('Average PV along trajectory for > 20K ascent')
        plt.xlabel('time from start, hours')
        plt.ylabel('theta, kelvin')
        plt.subplot(1, 2, 2)
        plt.contourf(xtime,
                     ytheta,
                     dtmean,
                     np.linspace(-25, 25, 26),
                     cmap='RdBu_r')
        plt.plot(np.linspace((lts - 1) * 6, 0, lts), thetamean)
        plt.title('Average diabatic heating')
        plt.xlabel('time, hours')
        plt.savefig('PV_dtheta_trajectory_crosssection_mean_' + time_string +
                    '.png')
        plt.show()
Example #20
 def test_xaxis_labels(self):
     qplt.contour(self.cube, coords=('str_coord', 'bar'))
     self.assertPointsTickLabels('xaxis')
def make_transect_contours(
    cfg,
    metadata,
    filename,
):
    """
    Make a contour plot of the transect for an individual model.

    This tool loads the cube from the file, checks that the units are
    sensible BGC units, checks for layers, adjusts the titles accordingly,
    determines the ultimate file name and format, then saves the image.

    Parameters
    ----------
    cfg: dict
        the opened global config dictionary, passed by ESMValTool.
    metadata: dict
        The metadata dictionary for a specific model.
    filename: str
        The preprocessed model file.

    """
    # Load cube and set up units
    cube = iris.load_cube(filename)
    cube = diagtools.bgc_units(cube, metadata['short_name'])
    cube = make_depth_safe(cube)

    # Load threshold/thresholds.
    plot_details = {}
    colours = []
    thresholds = diagtools.load_thresholds(cfg, metadata)
    linewidths = [1 for thres in thresholds]
    linestyles = ['-' for thres in thresholds]

    cubes = make_cube_region_dict(cube)
    for region, cube in cubes.items():
        for itr, thres in enumerate(thresholds):
            colour = diagtools.get_colour_from_cmap(itr, len(thresholds))
            label = str(thres) + ' ' + str(cube.units)
            colours.append(colour)
            plot_details[thres] = {
                'c': colour,
                'lw': 1,
                'ls': '-',
                'label': label
            }

        qplt.contour(cube,
                     thresholds,
                     colors=colours,
                     linewidths=linewidths,
                     linestyles=linestyles,
                     rasterized=True)

        # Determine y log scale.
        if determine_set_y_logscale(cfg, metadata):
            plt.gca().set_yscale('log')

        add_sea_floor(cube)

        # Add legend
        diagtools.add_legend_outside_right(plot_details,
                                           plt.gca(),
                                           column_width=0.08,
                                           loc='below')

        # Add title to plot
        title = ' '.join([
            metadata['dataset'], metadata['long_name'],
            determine_transect_str(cube, region)
        ])
        titlify(title)

        # Load image format extension
        image_extention = diagtools.get_image_format(cfg)

        # Determine image filename:
        if metadata['dataset'].find('MultiModel') > -1:
            path = diagtools.folder(
                cfg['plot_dir']) + os.path.basename(filename)
            path = path.replace(
                '.nc', region + '_transect_contour' + image_extention)
        else:
            path = diagtools.get_image_path(
                cfg,
                metadata,
                suffix=region + 'transect_contour' + image_extention,
            )

        # Saving files:
        if cfg['write_plots']:
            logger.info('Saving plots to %s', path)
            plt.savefig(path)

        plt.close()
def multi_model_contours(
    cfg,
    metadatas,
):
    """
    Make a multi model comparison plot showing several transect contour plots.

    This tool loads several cubes from the files, checks that the units are
    sensible BGC units, checks for layers, adjusts the titles accordingly,
    determines the ultimate file name and format, then saves the image.

    Parameters
    ----------
    cfg: dict
        the opened global config dictionary, passed by ESMValTool.
    metadatas: dict
        The metadata dictionaries for each model.

    """
    ####
    # Load the data for each layer as a separate cube
    model_cubes = {}
    regions = {}
    thresholds = {}
    set_y_logscale = True

    for filename in sorted(metadatas):
        cube = iris.load_cube(filename)
        cube = diagtools.bgc_units(cube, metadatas[filename]['short_name'])
        cube = make_depth_safe(cube)
        cubes = make_cube_region_dict(cube)
        model_cubes[filename] = cubes
        for region in model_cubes[filename]:
            regions[region] = True

        # Determine y log scale.
        set_y_logscale = determine_set_y_logscale(cfg, metadatas[filename])

        # Load threshold/thresholds.
        tmp_thresholds = diagtools.load_thresholds(cfg, metadatas[filename])
        for threshold in tmp_thresholds:
            thresholds[threshold] = True

    # Load image format extension
    image_extention = diagtools.get_image_format(cfg)

    # Make a plot for each layer and each threshold
    for region, threshold in itertools.product(regions, thresholds):
        logger.info('plotting threshold: \t%s', threshold)
        title = ''
        plot_details = {}

        # Plot each file in the group
        for index, filename in enumerate(sorted(metadatas)):
            color = diagtools.get_colour_from_cmap(index, len(metadatas))
            linewidth = 1.
            linestyle = '-'
            # Determine line style for MultiModel statistics:
            if 'MultiModel' in metadatas[filename]['dataset']:
                linewidth = 2.
                linestyle = ':'
            # Determine line style for Observations
            if metadatas[filename]['project'] in diagtools.get_obs_projects():
                color = 'black'
                linewidth = 1.7
                linestyle = '-'

            qplt.contour(model_cubes[filename][region], [
                threshold,
            ],
                         colors=[
                             color,
                         ],
                         linewidths=linewidth,
                         linestyles=linestyle,
                         rasterized=True)

            plot_details[filename] = {
                'c': color,
                'ls': linestyle,
                'lw': linewidth,
                'label': metadatas[filename]['dataset']
            }

            if set_y_logscale:
                plt.gca().set_yscale('log')

            title = metadatas[filename]['long_name']
            units = str(model_cubes[filename][region].units)

            add_sea_floor(model_cubes[filename][region])

        # Add title, threshold, legend to plots
        title = ' '.join([
            title,
            str(threshold), units,
            determine_transect_str(model_cubes[filename][region], region)
        ])
        titlify(title)
        plt.legend(loc='best')

        # Saving files:
        if cfg['write_plots']:
            path = diagtools.get_image_path(
                cfg,
                metadatas[filename],
                prefix='MultipleModels',
                suffix='_'.join([
                    'contour_transect', region,
                    str(threshold) + image_extention
                ]),
                metadata_id_list=[
                    'field', 'short_name', 'preprocessor', 'diagnostic',
                    'start_year', 'end_year'
                ],
            )

        # Resize and add legend outside the axes.
        plt.gcf().set_size_inches(9., 6.)
        diagtools.add_legend_outside_right(plot_details,
                                           plt.gca(),
                                           column_width=0.15)

        logger.info('Saving plots to %s', path)
        plt.savefig(path)
        plt.close()
def make_map_contour(
    cfg,
    metadata,
    filename,
):
    """
    Make a simple contour map plot for an individual model.

    Parameters
    ----------
    cfg: dict
        the opened global config dictionary, passed by ESMValTool.
    metadata: dict
        the metadata dictionary
    filename: str
        the preprocessed model file.

    """
    # Load cube and set up units
    cube = iris.load_cube(filename)
    cube = diagtools.bgc_units(cube, metadata['short_name'])

    # Is this data a multi-model dataset?
    multi_model = metadata['dataset'].find('MultiModel') > -1

    # Make a dict of cubes for each layer.
    cubes = diagtools.make_cube_layer_dict(cube)

    # Load image format extension
    image_extention = diagtools.get_image_format(cfg)

    # Load threshold/thresholds.
    plot_details = {}
    colours = []
    thresholds = diagtools.load_thresholds(cfg, metadata)

    for itr, thres in enumerate(thresholds):
        if len(thresholds) > 1:
            colour = plt.cm.jet(float(itr) / float(len(thresholds) - 1.))
        else:
            colour = plt.cm.jet(0)
        label = str(thres) + ' ' + str(cube.units)
        colours.append(colour)
        plot_details[thres] = {'c': colour, 'lw': 1, 'ls': '-', 'label': label}

    linewidths = [1 for thres in thresholds]
    linestyles = ['-' for thres in thresholds]
    # Making plots for each layer
    for layer_index, (layer, cube_layer) in enumerate(cubes.items()):
        layer = str(layer)
        qplt.contour(cube_layer,
                     thresholds,
                     colors=colours,
                     linewidths=linewidths,
                     linestyles=linestyles,
                     rasterized=True)

        try:
            plt.gca().coastlines()
        except AttributeError:
            logger.warning('Not able to add coastlines')
        try:
            plt.gca().add_feature(cartopy.feature.LAND,
                                  zorder=10,
                                  facecolor=[0.8, 0.8, 0.8])
        except AttributeError:
            logger.warning('Not able to add land feature')
        # Add legend
        diagtools.add_legend_outside_right(plot_details,
                                           plt.gca(),
                                           column_width=0.02,
                                           loc='below')

        # Add title to plot
        title = ' '.join([metadata['dataset'], metadata['long_name']])
        depth_units = str(cube_layer.coords('depth')[0].units)
        if layer:
            title = '{} ({} {})'.format(title, layer, depth_units)
        plt.title(title)

        # Determine image filename:
        if multi_model:
            path = os.path.join(diagtools.folder(cfg['plot_dir']),
                                os.path.basename(filename))
            path = path.replace('.nc', '_contour_map_' + str(layer_index))
            path = path + image_extention
        else:
            path = diagtools.get_image_path(
                cfg,
                metadata,
                suffix='_contour_map_' + str(layer_index) + image_extention,
            )

        # Saving files:
        if cfg['write_plots']:
            logger.info('Saving plots to %s', path)
            plt.savefig(path)

        plt.close()
def multi_model_contours(
    cfg,
    metadata,
):
    """
    Make a contour map showing several models.

    Parameters
    ----------
    cfg: dict
        the opened global config dictionary, passed by ESMValTool.
    metadata: dict
        the metadata dictionary.

    """
    ####
    # Load the data for each layer as a separate cube
    model_cubes = {}
    layers = {}
    for filename in sorted(metadata):
        cube = iris.load_cube(filename)
        cube = diagtools.bgc_units(cube, metadata[filename]['short_name'])

        cubes = diagtools.make_cube_layer_dict(cube)
        model_cubes[filename] = cubes
        for layer in cubes:
            layers[layer] = True

    # Load image format extension
    image_extention = diagtools.get_image_format(cfg)

    # Load threshold/thresholds.
    thresholds = diagtools.load_thresholds(cfg, metadata)

    # Make a plot for each layer and each threshold
    for layer, threshold in itertools.product(layers, thresholds):

        title = ''
        z_units = ''
        plot_details = {}
        cmap = plt.cm.get_cmap('jet')
        land_drawn = False

        # Plot each file in the group
        for index, filename in enumerate(sorted(metadata)):

            if len(metadata) > 1:
                color = cmap(index / (len(metadata) - 1.))
            else:
                color = 'blue'
            linewidth = 1.
            linestyle = '-'

            # Determine line style for Observations
            if metadata[filename]['project'] in diagtools.get_obs_projects():
                color = 'black'
                linewidth = 1.7
                linestyle = '-'

            # Determine line style for MultiModel statistics:
            if 'MultiModel' in metadata[filename]['dataset']:
                color = 'black'
                linestyle = ':'
                linewidth = 1.4

            cube = model_cubes[filename][layer]
            qplt.contour(cube, [
                threshold,
            ],
                         colors=[
                             color,
                         ],
                         linewidths=linewidth,
                         linestyles=linestyle,
                         rasterized=True)
            plot_details[filename] = {
                'c': color,
                'ls': linestyle,
                'lw': linewidth,
                'label': metadata[filename]['dataset']
            }

            if not land_drawn:
                try:
                    plt.gca().coastlines()
                except AttributeError:
                    logger.warning('Not able to add coastlines')
                plt.gca().add_feature(cartopy.feature.LAND,
                                      zorder=10,
                                      facecolor=[0.8, 0.8, 0.8])
                land_drawn = True

            title = metadata[filename]['long_name']
            if layer != '':
                z_units = model_cubes[filename][layer].coords('depth')[0].units
            units = str(model_cubes[filename][layer].units)

        # Add title, threshold, legend to plots
        title = ' '.join([title, str(threshold), units])
        if layer:
            title = ' '.join([title, '(', str(layer), str(z_units), ')'])
        plt.title(title)
        plt.legend(loc='best')

        # Saving files:
        if cfg['write_plots']:
            path = diagtools.get_image_path(
                cfg,
                metadata[filename],
                prefix='MultipleModels_',
                suffix='_'.join([
                    '_contour_map_',
                    str(threshold),
                    str(layer) + image_extention
                ]),
                metadata_id_list=[
                    'field', 'short_name', 'preprocessor', 'diagnostic',
                    'start_year', 'end_year'
                ],
            )

        # Resize and add legend outside the axes.
        plt.gcf().set_size_inches(9., 6.)
        diagtools.add_legend_outside_right(plot_details,
                                           plt.gca(),
                                           column_width=0.15)

        logger.info('Saving plots to %s', path)
        plt.savefig(path)
        plt.close()
Example #25
def make_map_extent_plots(
    cfg,
    metadata,
    filename,
):
    """
    Make an extent map plot showing several times for an individual model.

    Parameters
    ----------
    cfg: dict
        the opened global config dictionary, passed by ESMValTool.
    metadata: dict
        The metadata dictionary for a specific model.
    filename: str
        The preprocessed model file.

    """
    # Load cube and set up units
    cube = iris.load_cube(filename)
    iris.coord_categorisation.add_year(cube, 'time')
    cube = diagtools.bgc_units(cube, metadata['short_name'])
    cube = agregate_by_season(cube)

    # Is this data a multi-model dataset?
    multi_model = metadata['dataset'].find('MultiModel') > -1

    # Make a dict of cubes for each layer.
    cubes = diagtools.make_cube_layer_dict(cube)

    # Load image format extension
    image_extention = diagtools.get_image_format(cfg)

    # Load threshold, pole and season
    threshold = float(cfg['threshold'])
    pole = get_pole(cube)
    season = get_season(cube)

    # Start making figure
    for layer_index, (layer, cube_layer) in enumerate(cubes.items()):

        fig = plt.figure()
        fig.set_size_inches(7, 7)

        if pole == 'North':  # North Hemisphere
            projection = cartopy.crs.NorthPolarStereo()
            ax1 = plt.subplot(111, projection=projection)
            ax1.set_extent([-180, 180, 50, 90], cartopy.crs.PlateCarree())

        if pole == 'South':  # South Hemisphere
            projection = cartopy.crs.SouthPolarStereo()
            ax1 = plt.subplot(111, projection=projection)
            ax1.set_extent([-180, 180, -90, -50], cartopy.crs.PlateCarree())
        try:
            ax1.add_feature(cartopy.feature.LAND,
                            zorder=10,
                            facecolor=[0.8, 0.8, 0.8])
        except ConnectionRefusedError:
            logger.error('Cartopy was unable to add coastlines due to a '
                         'connection error.')

        ax1.gridlines(linewidth=0.5,
                      color='black',
                      zorder=20,
                      alpha=0.5,
                      linestyle='--')

        try:
            plt.gca().coastlines()
        except AttributeError:
            logger.warning('make_polar_map: Not able to add coastlines')

        times = np.array(cube.coord('time').points.astype(float))
        plot_desc = {}
        for time_itr, time in enumerate(times):
            cube = cube_layer[time_itr]
            line_width = 1
            color = plt.cm.jet(float(time_itr) / float(len(times)))
            label = get_year(cube)
            plot_desc[time] = {
                'label': label,
                'c': [
                    color,
                ],
                'lw': [
                    line_width,
                ],
                'ls': [
                    '-',
                ]
            }

            layer = str(layer)
            qplt.contour(cube, [
                threshold,
            ],
                         colors=plot_desc[time]['c'],
                         linewidths=plot_desc[time]['lw'],
                         linestyles=plot_desc[time]['ls'],
                         rasterized=True)

        # Add legend
        legend_size = len(plot_desc) + 1
        ncols = int(legend_size / 25) + 1
        ax1.set_position([
            ax1.get_position().x0,
            ax1.get_position().y0,
            ax1.get_position().width * (1. - 0.1 * ncols),
            ax1.get_position().height
        ])

        fig.set_size_inches(7 + ncols * 1.2, 7)

        # Construct dummy plots.
        for i in sorted(plot_desc):
            plt.plot(
                [],
                [],
                c=plot_desc[i]['c'][0],
                lw=plot_desc[i]['lw'][0],
                ls=plot_desc[i]['ls'][0],
                label=plot_desc[i]['label'],
            )

        legd = ax1.legend(loc='center left',
                          ncol=ncols,
                          prop={'size': 10},
                          bbox_to_anchor=(1., 0.5))
        legd.draw_frame(False)
        legd.get_frame().set_alpha(0.)

        # Add title to plot
        title = ' '.join([
            metadata['dataset'],
        ])
        if layer:
            title = ' '.join([
                title, '(', layer,
                str(cube_layer.coords('depth')[0].units), ')'
            ])
        plt.title(title)

        # Determine image filename:
        suffix = '_'.join(['ortho_map', pole, season, str(layer_index)])
        suffix = suffix.replace(' ', '') + image_extention
        if multi_model:
            path = diagtools.folder(cfg['plot_dir'])
            path = path + os.path.basename(filename)
            path = path.replace('.nc', suffix)
        else:
            path = diagtools.get_image_path(
                cfg,
                metadata,
                suffix=suffix,
            )

        # Saving files:
        if cfg['write_plots']:
            logger.info('Saving plots to %s', path)
            plt.savefig(path)
        plt.close()
Example #26
def permafrost_area(soiltemp, airtemp, landfrac, run):
    """Calculate the permafrost area and make a plot."""
    # Define parameters of the test to calculate the existence of permafrost
    thresh_temperature = 273.2
    frozen_months = 24
    prop_months_frozen = 0.5  # frozen for at least half of the simulation

    # make a mask of land fraction over non iced areas and extract northern
    # latitudes
    nonice = get_nonice_mask(run)
    mask = iris.analysis.maths.multiply(nonice, landfrac)
    mask = mask.extract(iris.Constraint(latitude=lambda cell: cell > 0))

    # extract northern high latitudes [and deepest soil level]
    soiltemp = soiltemp.extract(iris.Constraint(depth=2.0))  # from 1m to 3m

    # Make an aggregator to define the permafrost extent
    # I don't really understand this but it works
    frozen_count = iris.analysis.Aggregator('frozen_count',
                                            num_frozen,
                                            units_func=lambda units: 1)

    # Calculate the permafrost locations
    pf_periods = soiltemp.collapsed('time',
                                    frozen_count,
                                    threshold=thresh_temperature,
                                    frozen_length=frozen_months)
    tot_time = len(soiltemp.coord('time').points)
    pf_periods = pf_periods / float(tot_time)
    pf_periods.rename('Fraction of months layer 4 (-1m to -3m) soil is frozen')

    # mask out non permafrost points, sea points and ice points
    pf_periods.data = np.ma.masked_less(pf_periods.data, prop_months_frozen)
    # set all non-masked values to 1 for area calculation
    # (may be a better way of doing this but I'm not sure what it is)
    pf_periods = pf_periods / pf_periods
    # mask for land area also
    pf_periods = pf_periods * mask

    # calculate the area of permafrost
    # Generate area-weights array. This method requires bounds on lat/lon
    # coords, add some in sensible locations using the "guess_bounds"
    # method.
    for coord in ['latitude', 'longitude']:
        if not pf_periods.coord(coord).has_bounds():
            pf_periods.coord(coord).guess_bounds()
    grid_areas = iris.analysis.cartography.area_weights(pf_periods)
    # calculate the areas not masked in pf_periods
    pf_area = pf_periods.collapsed(['longitude', 'latitude'],
                                   iris.analysis.SUM,
                                   weights=grid_areas).data

    # what is the area where the temperature is less than 0 degrees C?
    airtemp = airtemp.collapsed('time', iris.analysis.MEAN)
    # if more than 2 dims, select the ground level
    if airtemp.ndim > 2:
        airtemp = airtemp[0]
    airtemp_below_zero = np.where(airtemp.data < 273.2, 1, 0)
    airtemp_area = np.sum(airtemp_below_zero * grid_areas)

    pf_prop = pf_area / airtemp_area
    pf_area = pf_area / 1e12

    # Figure Permafrost extent north america
    plt.figure(figsize=(8, 8))
    ax = plt.axes(projection=ccrs.Orthographic(central_longitude=-80.0,
                                               central_latitude=60.0))
    qplt.pcolormesh(pf_periods)
    ax.gridlines()
    ax.coastlines()
    levels = [thresh_temperature]
    qplt.contour(airtemp, levels, colors='k', linewidths=3)
    plt.title('Permafrost extent & zero degree isotherm ({})'.format(
        run['runid']))
    plt.savefig('pf_extent_north_america_' + run['runid'] + '.png')

    # Figure Permafrost extent asia
    plt.figure(figsize=(8, 8))
    ax = plt.axes(projection=ccrs.Orthographic(central_longitude=100.0,
                                               central_latitude=50.0))
    qplt.pcolormesh(pf_periods)
    ax.gridlines()
    ax.coastlines()
    levels = [thresh_temperature]
    qplt.contour(airtemp, levels, colors='k', linewidths=3)
    plt.title('Permafrost extent & zero degree isotherm ({})'.format(
        run['runid']))
    plt.savefig('pf_extent_asia_' + run['runid'] + '.png')

    # defining metrics for return up to top level
    metrics = {
        'permafrost area': pf_area,
        'fraction area permafrost over zerodeg': pf_prop,
    }

    return metrics
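
The frozen_count aggregator above wraps a plain function, num_frozen, that is not shown in this snippet; with iris.analysis.Aggregator, the keyword arguments given to collapsed (here threshold and frozen_length) are forwarded to that function along with the data and the collapse axis. A rough sketch of what such a function might look like, as an assumption rather than the original implementation:

import numpy as np


def num_frozen(data, axis=None, threshold=None, frozen_length=None):
    # Count time steps below the temperature threshold along the collapse
    # axis and flag points where that count reaches frozen_length.
    # This is an illustrative guess at the real criterion.
    frozen_steps = np.sum(data < threshold, axis=axis)
    return np.where(frozen_steps >= frozen_length, 1, 0)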