Example #1
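Note: the examples in this listing are reproduced without their import preambles. A minimal sketch of the common imports they assume, inferred from the calls used (the aliases are the conventional Iris ones; individual examples may need extras, and anything not shown in the original listing is an assumption):

import copy

import cartopy.crs as ccrs
import matplotlib.dates as mdates
import matplotlib.pyplot as plt
import numpy as np
import numpy.ma as ma

import iris
import iris.analysis.cartography
import iris.coord_categorisation
import iris.coords as coords
import iris.iterate
import iris.plot as iplt
import iris.quickplot as qplt
from iris.analysis import Aggregator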
def main():
    # load a single cube of surface temperature between +/- 5 latitude
    fname = iris.sample_data_path("ostia_monthly.nc")
    cube = iris.load_cube(
        fname,
        iris.Constraint("surface_temperature", latitude=lambda v: -5 < v < 5),
    )

    # Take the mean over latitude
    cube = cube.collapsed("latitude", iris.analysis.MEAN)

    # Now that we have our data in a nice way, let's create the plot
    # contour with 20 levels
    qplt.contourf(cube, 20)

    # Put a custom label on the y axis
    plt.ylabel("Time / years")

    # Stop matplotlib providing clever axes range padding
    plt.axis("tight")

    # As we are plotting annual variability, put years as the y ticks
    plt.gca().yaxis.set_major_locator(mdates.YearLocator())

    # And format the ticks to just show the year
    plt.gca().yaxis.set_major_formatter(mdates.DateFormatter("%Y"))

    iplt.show()
Example #2
def main():
    # Load some test data.
    fname = iris.sample_data_path("hybrid_height.nc")
    theta = iris.load_cube(fname, "air_potential_temperature")

    # Extract a single height vs longitude cross-section. N.B. This could
    # easily be changed to extract a specific slice, or even to loop over *all*
    # cross section slices.
    cross_section = next(theta.slices(["grid_longitude",
                                       "model_level_number"]))

    qplt.contourf(cross_section,
                  coords=["grid_longitude", "altitude"],
                  cmap="RdBu_r")
    iplt.show()

    # Now do the equivalent plot, only against model level
    plt.figure()

    qplt.contourf(
        cross_section,
        coords=["grid_longitude", "model_level_number"],
        cmap="RdBu_r",
    )
    iplt.show()
Example #3
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # load a single cube of surface temperature between +/- 5 latitude
    fname = iris.sample_data_path('ostia_monthly.nc')
    cube = iris.load_cube(
        fname,
        iris.Constraint('surface_temperature', latitude=lambda v: -5 < v < 5))

    # Take the mean over latitude
    cube = cube.collapsed('latitude', iris.analysis.MEAN)

    # Now that we have our data in a nice way, let's create the plot
    # contour with 20 levels
    qplt.contourf(cube, 20)

    # Put a custom label on the y axis
    plt.ylabel('Time / years')

    # Stop matplotlib providing clever axes range padding
    plt.axis('tight')

    # As we are plotting annual variability, put years as the y ticks
    plt.gca().yaxis.set_major_locator(mdates.YearLocator())

    # And format the ticks to just show the year
    plt.gca().yaxis.set_major_formatter(mdates.DateFormatter('%Y'))

    iplt.show()
Example #4
def main():
    fname = iris.sample_data_path('ostia_monthly.nc')

    # load a single cube of surface temperature between +/- 5 latitude
    cube = iris.load_cube(fname, iris.Constraint('surface_temperature', latitude=lambda v: -5 < v < 5))

    # Take the mean over latitude
    cube = cube.collapsed('latitude', iris.analysis.MEAN)

    # Now that we have our data in a nice way, let's create the plot
    # contour with 20 levels
    qplt.contourf(cube, 20)

    # Put a custom label on the y axis
    plt.ylabel('Time / years')

    # Stop matplotlib providing clever axes range padding
    plt.axis('tight')

    # As we are plotting annual variability, put years as the y ticks
    plt.gca().yaxis.set_major_locator(mdates.YearLocator())

    # And format the ticks to just show the year
    plt.gca().yaxis.set_major_formatter(mdates.DateFormatter('%Y'))

    iplt.show()
Example #5
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load some test data.
    fname = iris.sample_data_path('hybrid_height.nc')
    theta = iris.load_cube(fname, 'air_potential_temperature')

    # Extract a single height vs longitude cross-section. N.B. This could
    # easily be changed to extract a specific slice, or even to loop over *all*
    # cross section slices.
    cross_section = next(theta.slices(['grid_longitude',
                                       'model_level_number']))

    qplt.contourf(cross_section,
                  coords=['grid_longitude', 'altitude'],
                  cmap='RdBu_r')
    iplt.show()

    # Now do the equivalent plot, only against model level
    plt.figure()

    qplt.contourf(cross_section,
                  coords=['grid_longitude', 'model_level_number'],
                  cmap='RdBu_r')
    iplt.show()
Example #6
def main():
    # Load the whole time-sequence as a single cube.
    file_path = iris.sample_data_path("E1_north_america.nc")
    cube = iris.load_cube(file_path)

    # Make an aggregator from the user function.
    SPELL_COUNT = Aggregator(
        "spell_count", count_spells, units_func=lambda units: 1
    )

    # Define the parameters of the test.
    threshold_temperature = 280.0
    spell_years = 5

    # Calculate the statistic.
    warm_periods = cube.collapsed(
        "time",
        SPELL_COUNT,
        threshold=threshold_temperature,
        spell_length=spell_years,
    )
    warm_periods.rename("Number of 5-year warm spells in 240 years")

    # Plot the results.
    qplt.contourf(warm_periods, cmap="RdYlBu_r")
    plt.gca().coastlines()
    iplt.show()
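Examples #6, #12 and #37 pass a user-defined count_spells function to Aggregator without showing it. A minimal sketch along the lines of the Iris custom-aggregation gallery example, assuming numpy as np and iris.util.rolling_window:

from iris.util import rolling_window


def count_spells(data, threshold, axis, spell_length):
    # Count points along 'axis' that begin a run of at least 'spell_length'
    # consecutive values exceeding 'threshold'.
    if axis < 0:
        # Handle negative axis indices as numpy does.
        axis += data.ndim
    # Flag the points above the threshold.
    data_hits = data > threshold
    # View the hits in overlapping windows of length 'spell_length'.
    hit_windows = rolling_window(data_hits, window=spell_length, axis=axis)
    # A window counts only if every point within it is a hit.
    full_windows = np.all(hit_windows, axis=axis + 1)
    # Total the qualifying windows along the collapsed axis.
    return np.sum(full_windows, axis=axis, dtype=int)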
Example #7
def main():
    fname = iris.sample_data_path("air_temp.pp")

    # Load exactly one cube from the given file.
    temperature = iris.load_cube(fname)

    # We only want a small number of latitudes, so filter some out
    # using "extract".
    temperature = temperature.extract(
        iris.Constraint(latitude=lambda cell: 68 <= cell < 78))

    for cube in temperature.slices("longitude"):

        # Create a string label to identify this cube (i.e. latitude: value).
        cube_label = "latitude: %s" % cube.coord("latitude").points[0]

        # Plot the cube, and associate it with a label.
        qplt.plot(cube, label=cube_label)

    # Add the legend with 2 columns.
    plt.legend(ncol=2)

    # Put a grid on the plot.
    plt.grid(True)

    # Tell matplotlib not to extend the plot axes range to nicely
    # rounded numbers.
    plt.axis("tight")

    # Finally, show it.
    iplt.show()
Example #8
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load some test data.
    fname = iris.sample_data_path('hybrid_height.nc')
    theta = iris.load_cube(fname, 'air_potential_temperature')

    # Extract a single height vs longitude cross-section. N.B. This could
    # easily be changed to extract a specific slice, or even to loop over *all*
    # cross section slices.
    cross_section = next(theta.slices(['grid_longitude',
                                       'model_level_number']))

    qplt.contourf(cross_section, coords=['grid_longitude', 'altitude'],
                  cmap='RdBu_r')
    iplt.show()

    # Now do the equivalent plot, only against model level
    plt.figure()

    qplt.contourf(cross_section,
                  coords=['grid_longitude', 'model_level_number'],
                  cmap='RdBu_r')
    iplt.show()
Example #9
def main():
    fname = iris.sample_data_path('air_temp.pp')

    # Load exactly one cube from the given file.
    temperature = iris.load_cube(fname)

    # We only want a small number of latitudes, so filter some out
    # using "extract".
    temperature = temperature.extract(
        iris.Constraint(latitude=lambda cell: 68 <= cell < 78))

    for cube in temperature.slices('longitude'):

        # Create a string label to identify this cube (i.e. latitude: value).
        cube_label = 'latitude: %s' % cube.coord('latitude').points[0]

        # Plot the cube, and associate it with a label.
        qplt.plot(cube, label=cube_label)

    # Add the legend with 2 columns.
    plt.legend(ncol=2)

    # Put a grid on the plot.
    plt.grid(True)

    # Tell matplotlib not to extend the plot axes range to nicely
    # rounded numbers.
    plt.axis('tight')

    # Finally, show it.
    iplt.show()
Example #10
def main():
    # Load data
    filepath = iris.sample_data_path("orca2_votemper.nc")
    cube = iris.load_cube(filepath)

    # Choose plot projections
    projections = {}
    projections["Mollweide"] = ccrs.Mollweide()
    projections["PlateCarree"] = ccrs.PlateCarree()
    projections["NorthPolarStereo"] = ccrs.NorthPolarStereo()
    projections["Orthographic"] = ccrs.Orthographic(central_longitude=-90,
                                                    central_latitude=45)

    pcarree = projections["PlateCarree"]
    # Transform cube to target projection
    new_cube, extent = iris.analysis.cartography.project(cube,
                                                         pcarree,
                                                         nx=400,
                                                         ny=200)

    # Plot data in each projection
    for name in sorted(projections):
        fig = plt.figure()
        fig.suptitle("ORCA2 Data Projected to {}".format(name))
        # Set up axes and title
        ax = plt.subplot(projection=projections[name])
        # Set limits
        ax.set_global()
        # plot with Iris quickplot pcolormesh
        qplt.pcolormesh(new_cube)
        # Draw coastlines
        ax.coastlines()

        iplt.show()
Example #11
def main():
    fname = iris.sample_data_path("NAME_output.txt")

    boundary_volc_ash_constraint = iris.Constraint(
        "VOLCANIC_ASH_AIR_CONCENTRATION", flight_level="From FL000 - FL200"
    )

    # Callback shown as None to illustrate where a cube-level callback
    # function would be used if required
    cube = iris.load_cube(fname, boundary_volc_ash_constraint, callback=None)

    # draw contour levels for the data (the top level is just a catch-all)
    levels = (0.0002, 0.002, 0.004, 1e10)
    cs = iplt.contourf(
        cube, levels=levels, colors=("#80ffff", "#939598", "#e00404")
    )

    # draw a black outline at the lowest contour to highlight affected areas
    iplt.contour(cube, levels=(levels[0], 100), colors="black")

    # set an extent and a background image for the map
    ax = plt.gca()
    ax.set_extent((-90, 20, 20, 75))
    ax.stock_img("ne_shaded")

    # make a legend, with custom labels, for the coloured contour set
    artists, _ = cs.legend_elements()
    labels = [
        r"$%s < x \leq %s$" % (levels[0], levels[1]),
        r"$%s < x \leq %s$" % (levels[1], levels[2]),
        r"$x > %s$" % levels[2],
    ]
    ax.legend(artists, labels, title="Ash concentration / g m-3",
              loc="upper left")

    time = cube.coord("time")
    time_date = time.units.num2date(time.points[0]).strftime(UTC_format)
    plt.title("Volcanic ash concentration forecast\nvalid at %s" % time_date)

    iplt.show()
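The UTC_format name used in the title above is a module-level constant that the listing omits; in the corresponding Iris gallery script it is a strftime pattern, roughly as follows (the exact pattern is an assumption):

UTC_format = '%H%M%Z %d/%m/%Y'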
Example #12
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load the whole time-sequence as a single cube.
    file_path = iris.sample_data_path('E1_north_america.nc')
    cube = iris.load_cube(file_path)

    # Make an aggregator from the user function.
    SPELL_COUNT = Aggregator('spell_count',
                             count_spells,
                             units_func=lambda units: 1)

    # Define the parameters of the test.
    threshold_temperature = 280.0
    spell_years = 5

    # Calculate the statistic.
    warm_periods = cube.collapsed('time', SPELL_COUNT,
                                  threshold=threshold_temperature,
                                  spell_length=spell_years)
    warm_periods.rename('Number of 5-year warm spells in 240 years')

    # Plot the results.
    qplt.contourf(warm_periods, cmap='RdYlBu_r')
    plt.gca().coastlines()
    iplt.show()
Example #13
def main():
    # Absolute path to local CMIP5 data (not in the Iris sample-data set),
    # so load it directly rather than via iris.sample_data_path().
    fname = ('/nfs/a266/data/CMIP5_AFRICA/BC_0.5x0.5/IPSL-CM5A-LR/historical/'
             'tasmax_WFDEI_1979-2013_0.5x0.5_day_IPSL-CM5A-LR_africa_'
             'historical_r1i1p1_full.nc')
    soi = iris.load_cube(fname)

    # Window length for filters.
    window = 121

    # Construct 2-year (24-month) and 7-year (84-month) low pass filters,
    # with weights as in the monthly SOI example.
    wgts24 = low_pass_weights(window, 1. / 24.)
    wgts84 = low_pass_weights(window, 1. / 84.)
    soi24 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts24),
                               weights=wgts24)
    soi84 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts84),
                               weights=wgts84)

    # Plot the SOI time series and both filtered versions.
    plt.figure(figsize=(9, 4))
    iplt.plot(soi, color='0.7', linewidth=1., linestyle='-',
              alpha=1., label='no filter')
    iplt.plot(soi24, color='b', linewidth=2., linestyle='-',
              alpha=.7, label='2-year filter')
    iplt.plot(soi84, color='r', linewidth=2., linestyle='-',
              alpha=.7, label='7-year filter')
    plt.ylim([-4, 4])
    plt.title('West Africa')
    plt.xlabel('Time')
    plt.ylabel('SOI')
    plt.legend(fontsize=10)
    iplt.show()
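Examples #13, #19, #21 and #32 all call low_pass_weights without defining it. A sketch of the Lanczos-filter weights function from the Iris SOI-filtering gallery example, assuming numpy as np:

def low_pass_weights(window, cutoff):
    # Calculate weights for a low-pass Lanczos filter.
    # window: length of the filter window.
    # cutoff: cutoff frequency in inverse time steps.
    order = ((window - 1) // 2) + 1
    nwts = 2 * order + 1
    w = np.zeros([nwts])
    n = nwts // 2
    w[n] = 2 * cutoff
    k = np.arange(1.0, n)
    sigma = np.sin(np.pi * k / n) * n / (np.pi * k)
    firstfactor = np.sin(2.0 * np.pi * cutoff * k) / (np.pi * k)
    w[n - 1:0:-1] = firstfactor * sigma
    w[n + 1:-1] = firstfactor * sigma
    # Drop the zero end-points so len(weights) == window.
    return w[1:-1]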
Example #14
def main():
    # Load data
    filepath = iris.sample_data_path('orca2_votemper.nc')
    cube = iris.load_cube(filepath)

    # Choose plot projections
    projections = {}
    projections['Mollweide'] = ccrs.Mollweide()
    projections['PlateCarree'] = ccrs.PlateCarree()
    projections['NorthPolarStereo'] = ccrs.NorthPolarStereo()
    projections['Orthographic'] = ccrs.Orthographic(central_longitude=-90,
                                                    central_latitude=45)

    pcarree = projections['PlateCarree']
    # Transform cube to target projection
    new_cube, extent = iris.analysis.cartography.project(cube, pcarree,
                                                         nx=400, ny=200)

    # Plot data in each projection
    for name in sorted(projections):
        fig = plt.figure()
        fig.suptitle('ORCA2 Data Projected to {}'.format(name))
        # Set up axes and title
        ax = plt.subplot(projection=projections[name])
        # Set limits
        ax.set_global()
        # plot with Iris quickplot pcolormesh
        qplt.pcolormesh(new_cube)
        # Draw coastlines
        ax.coastlines()

        iplt.show()
Example #15
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load the gridded temperature and salinity data.
    fname = iris.sample_data_path('atlantic_profiles.nc')
    cubes = iris.load(fname)
    theta, = cubes.extract('sea_water_potential_temperature')
    salinity, = cubes.extract('sea_water_practical_salinity')

    # Extract profiles of temperature and salinity from a particular point in
    # the southern portion of the domain, and limit the depth of the profile
    # to 1000m.
    lon_cons = iris.Constraint(longitude=330.5)
    lat_cons = iris.Constraint(latitude=lambda l: -10 < l < -9)
    depth_cons = iris.Constraint(depth=lambda d: d <= 1000)
    theta_1000m = theta.extract(depth_cons & lon_cons & lat_cons)
    salinity_1000m = salinity.extract(depth_cons & lon_cons & lat_cons)

    # Plot these profiles on the same set of axes. In each case we call plot
    # with two arguments, the cube followed by the depth coordinate. Putting
    # them in this order places the depth coordinate on the y-axis.
    # The first plot is in the default axes. We'll use the same color for the
    # curve and its axes/tick labels.
    plt.figure(figsize=(5, 6))
    temperature_color = (.3, .4, .5)
    ax1 = plt.gca()
    iplt.plot(theta_1000m, theta_1000m.coord('depth'), linewidth=2,
              color=temperature_color, alpha=.75)
    ax1.set_xlabel('Potential Temperature / K', color=temperature_color)
    ax1.set_ylabel('Depth / m')
    for ticklabel in ax1.get_xticklabels():
        ticklabel.set_color(temperature_color)
    # To plot salinity in the same axes we use twiny(). We'll use a different
    # color to identify salinity.
    salinity_color = (.6, .1, .15)
    ax2 = plt.gca().twiny()
    iplt.plot(salinity_1000m, salinity_1000m.coord('depth'), linewidth=2,
              color=salinity_color, alpha=.75)
    ax2.set_xlabel('Salinity / PSU', color=salinity_color)
    for ticklabel in ax2.get_xticklabels():
        ticklabel.set_color(salinity_color)
    plt.tight_layout()
    iplt.show()

    # Now plot a T-S diagram using scatter. We'll use all the profiles here,
    # and each point will be coloured according to its depth.
    plt.figure(figsize=(6, 6))
    depth_values = theta.coord('depth').points
    for s, t in iris.iterate.izip(salinity, theta, coords='depth'):
        iplt.scatter(s, t, c=depth_values, marker='+', cmap='RdYlBu_r')
    ax = plt.gca()
    ax.set_xlabel('Salinity / PSU')
    ax.set_ylabel('Potential Temperature / K')
    cb = plt.colorbar(orientation='horizontal')
    cb.set_label('Depth / m')
    plt.tight_layout()
    iplt.show()
Example #16
def main():
    file_path = iris.sample_data_path('polar_stereo.grib2')
    cube = iris.load_cube(file_path)
    qplt.contourf(cube)
    ax = plt.gca()
    ax.coastlines()
    ax.gridlines()
    iplt.show()
Example #17
def main():
    file_path = iris.sample_data_path("polar_stereo.grib2")
    cube = iris.load_cube(file_path)
    qplt.contourf(cube)
    ax = plt.gca()
    ax.coastlines()
    ax.gridlines()
    iplt.show()
Example #18
def main():
    file_path = iris.sample_data_path("toa_brightness_stereographic.nc")
    cube = iris.load_cube(file_path)
    qplt.contourf(cube)
    ax = plt.gca()
    ax.coastlines()
    ax.gridlines()
    iplt.show()
Example #19
def main():
    # Load the monthly-valued Southern Oscillation Index (SOI) time-series.
    fname = iris.sample_data_path("SOI_Darwin.nc")
    soi = iris.load_cube(fname)

    # Window length for filters.
    window = 121

    # Construct 2-year (24-month) and 7-year (84-month) low pass filters
    # for the SOI data which is monthly.
    wgts24 = low_pass_weights(window, 1.0 / 24.0)
    wgts84 = low_pass_weights(window, 1.0 / 84.0)

    # Apply each filter using the rolling_window method used with the weights
    # keyword argument. A weighted sum is required because the magnitude of
    # the weights are just as important as their relative sizes.
    soi24 = soi.rolling_window("time",
                               iris.analysis.SUM,
                               len(wgts24),
                               weights=wgts24)
    soi84 = soi.rolling_window("time",
                               iris.analysis.SUM,
                               len(wgts84),
                               weights=wgts84)

    # Plot the SOI time series and both filtered versions.
    plt.figure(figsize=(9, 4))
    iplt.plot(
        soi,
        color="0.7",
        linewidth=1.0,
        linestyle="-",
        alpha=1.0,
        label="no filter",
    )
    iplt.plot(
        soi24,
        color="b",
        linewidth=2.0,
        linestyle="-",
        alpha=0.7,
        label="2-year filter",
    )
    iplt.plot(
        soi84,
        color="r",
        linewidth=2.0,
        linestyle="-",
        alpha=0.7,
        label="7-year filter",
    )
    plt.ylim([-4, 4])
    plt.title("Southern Oscillation Index (Darwin Only)")
    plt.xlabel("Time")
    plt.ylabel("SOI")
    plt.legend(fontsize=10)
    iplt.show()
Example #20
def main():
    fname = iris.sample_data_path("colpex.pp")

    # The list of phenomena of interest
    phenomena = ["air_potential_temperature", "air_pressure"]

    # Define the constraint on standard name and model level
    constraints = [
        iris.Constraint(phenom, model_level_number=1) for phenom in phenomena
    ]

    air_potential_temperature, air_pressure = iris.load_cubes(
        fname, constraints
    )

    # Define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(1000, long_name="P0", units="hPa")
    # Convert reference pressure 'p0' into the same units as 'air_pressure'
    p0.convert_units(air_pressure.units)

    # Calculate Exner pressure
    exner_pressure = (air_pressure / p0) ** (287.05 / 1005.0)
    # Set the name (the unit is scalar)
    exner_pressure.rename("exner_pressure")

    # Calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # Set the name (the unit is K)
    air_temperature.rename("air_temperature")

    # Now create an iterator which will give us lat lon slices of
    # exner pressure and air temperature in the form
    # (exner_slice, air_temp_slice).
    lat_lon_slice_pairs = iris.iterate.izip(
        exner_pressure,
        air_temperature,
        coords=["grid_latitude", "grid_longitude"],
    )

    # For the purposes of this example, we only want to demonstrate the first
    # plot.
    lat_lon_slice_pairs = [next(lat_lon_slice_pairs)]

    plt.figure(figsize=(8, 4))
    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)

        # The default colorbar has a few too many ticks on it, causing text to
        # overlap. Therefore, limit the number of ticks.
        limit_colorbar_ticks(cont)

        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)
        iplt.show()
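Examples #20 and #23 rely on a limit_colorbar_ticks helper that is not shown. A minimal sketch matching the Iris "deriving phenomena" gallery helper, assuming the contour set's colorbar has already been created by quickplot:

import matplotlib.ticker


def limit_colorbar_ticks(contour_object):
    # Limit the colorbar attached to 'contour_object' to 4 tick marks.
    colorbar = contour_object.colorbar
    colorbar.locator = matplotlib.ticker.MaxNLocator(4)
    colorbar.update_ticks()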
Example #21
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load the monthly-valued Southern Oscillation Index (SOI) time-series.
    fname = iris.sample_data_path('SOI_Darwin.nc')
    soi = iris.load_cube(fname)

    # Window length for filters.
    window = 121

    # Construct 2-year (24-month) and 7-year (84-month) low pass filters
    # for the SOI data which is monthly.
    wgts24 = low_pass_weights(window, 1. / 24.)
    wgts84 = low_pass_weights(window, 1. / 84.)

    # Apply each filter using the rolling_window method used with the weights
    # keyword argument. A weighted sum is required because the magnitude of
    # the weights are just as important as their relative sizes.
    soi24 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts24),
                               weights=wgts24)
    soi84 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts84),
                               weights=wgts84)

    # Plot the SOI time series and both filtered versions.
    plt.figure(figsize=(9, 4))
    iplt.plot(soi,
              color='0.7',
              linewidth=1.,
              linestyle='-',
              alpha=1.,
              label='no filter')
    iplt.plot(soi24,
              color='b',
              linewidth=2.,
              linestyle='-',
              alpha=.7,
              label='2-year filter')
    iplt.plot(soi84,
              color='r',
              linewidth=2.,
              linestyle='-',
              alpha=.7,
              label='7-year filter')
    plt.ylim([-4, 4])
    plt.title('Southern Oscillation Index (Darwin Only)')
    plt.xlabel('Time')
    plt.ylabel('SOI')
    plt.legend(fontsize=10)
    iplt.show()
Example #22
def main():
    fname = iris.sample_data_path('air_temp.pp')

    temperature_orig = iris.load_cube(fname)

    temperature_noisy = copy.deepcopy(temperature_orig)
    sx, sy = temperature_orig.data.shape
    gaussian_noise = np.random.normal(loc=0.0, scale=5, size=(sx, sy))
    # Zero the noise outside a central band; integer division keeps the
    # indices valid under Python 3.
    gaussian_noise[:sx // 4, :] = 0
    gaussian_noise[(3 * sx) // 4:, :] = 0
    gaussian_noise[:, sy // 4:(3 * sy) // 4] = 0
    temperature_noisy.data = 2 * temperature_noisy.data + gaussian_noise

    # Original data
    plt.figure(figsize=(12, 5))
    plt.subplot(221)
    qplt.contourf(temperature_orig, 15)
    plt.gca().coastlines()

    # Noisy data
    plt.subplot(222)
    qplt.contourf(temperature_noisy, 15)
    plt.gca().coastlines()

    # Plot scatter
    scatter_x = temperature_orig.data.flatten()
    scatter_y = temperature_noisy.data.flatten()
    plt.subplot(223)
    plt.plot(scatter_x, scatter_y, '.', label="scatter")
    coeffs = np.polyfit(scatter_x, scatter_y, 1)
    print(coeffs)
    plt.title("Scatter plot")
    plt.xlabel("orig [K]")
    plt.ylabel("noisy [K]")
    fitted_y = np.polyval(coeffs, scatter_x)
    plt.plot(scatter_x, fitted_y, 'k', label="fit")
    plt.text(np.min(scatter_x), np.max(fitted_y),
             "\nax+b\na=%.3f\nb=%.4g" % (coeffs[0], coeffs[1]))

    # Plot the residuals mapped back onto the lat/lon grid
    diff_y = scatter_y - fitted_y
    temperature_diff = copy.deepcopy(temperature_orig)
    temperature_diff.data = diff_y.reshape(temperature_noisy.data.shape)
    temperature_diff.standard_name = None
    temperature_diff.long_name = "Residual from fitted curve"
    temperature_diff.var_name = "Hello_World"

    plt.subplot(224)
    qplt.contourf(temperature_diff, 15)
    plt.gca().coastlines()

    iplt.show()
Example #23
def main():
    fname = iris.sample_data_path('colpex.pp')

    # The list of phenomena of interest
    phenomena = ['air_potential_temperature', 'air_pressure']

    # Define the constraint on standard name and model level
    constraints = [iris.Constraint(phenom, model_level_number=1) for
                   phenom in phenomena]

    air_potential_temperature, air_pressure = iris.load_cubes(fname,
                                                              constraints)

    # Define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(1000, long_name='P0', units='hPa')
    # Convert reference pressure 'p0' into the same units as 'air_pressure'
    p0.convert_units(air_pressure.units)

    # Calculate Exner pressure
    exner_pressure = (air_pressure / p0) ** (287.05 / 1005.0)
    # Set the name (the unit is scalar)
    exner_pressure.rename('exner_pressure')

    # Calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # Set the name (the unit is K)
    air_temperature.rename('air_temperature')

    # Now create an iterator which will give us lat lon slices of
    # exner pressure and air temperature in the form
    # (exner_slice, air_temp_slice).
    lat_lon_slice_pairs = iris.iterate.izip(exner_pressure,
                                            air_temperature,
                                            coords=['grid_latitude',
                                                    'grid_longitude'])

    # For the purposes of this example, we only want to demonstrate the first
    # plot.
    lat_lon_slice_pairs = [next(lat_lon_slice_pairs)]

    plt.figure(figsize=(8, 4))
    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)

        # The default colorbar has a few too many ticks on it, causing text to
        # overlap. Therefore, limit the number of ticks.
        limit_colorbar_ticks(cont)

        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)
        iplt.show()
Example #24
def main():
    # Enable a future option, to ensure that the grib load works the same way
    # as in future Iris versions.
    iris.FUTURE.strict_grib_load = True

    file_path = iris.sample_data_path('polar_stereo.grib2')
    cube = iris.load_cube(file_path)
    qplt.contourf(cube)
    ax = plt.gca()
    ax.coastlines()
    ax.gridlines()
    iplt.show()
Example #25
def main():
    # Load data into three Cubes, one for each set of NetCDF files
    # (iris.load_strict was renamed iris.load_cube in Iris 1.0).
    e1 = iris.load_cube(iris.sample_data_path('E1_north_america.nc'))

    a1b = iris.load_cube(iris.sample_data_path('A1B_north_america.nc'))

    # Load the global pre-industrial mean temperature, and limit the domain
    # to the same North American region as e1 and a1b.
    north_america = iris.Constraint(longitude=lambda v: 225 <= v <= 315,
                                    latitude=lambda v: 15 <= v <= 60)
    pre_industrial = iris.load_cube(
        iris.sample_data_path('pre-industrial.pp'), north_america)

    pre_industrial_mean = pre_industrial.collapsed(['latitude', 'longitude'],
                                                   iris.analysis.MEAN)
    e1_mean = e1.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    a1b_mean = a1b.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    
    # Show ticks 30 years apart
    plt.gca().xaxis.set_major_locator(mdates.YearLocator(30))

    # Label the ticks with year data
    plt.gca().format_xdata = mdates.DateFormatter('%Y')

    # Plot the datasets
    qplt.plot(e1_mean, coords=['time'], label='E1 scenario',
              lw=1.5, color='blue')
    qplt.plot(a1b_mean, coords=['time'], label='A1B-Image scenario',
              lw=1.5, color='red')

    # Draw a horizontal line showing the pre industrial mean
    plt.axhline(y=pre_industrial_mean.data, color='gray', linestyle='dashed',
                label='pre-industrial', lw=1.5)

    # Establish where a1b and e1 share the same data, i.e. the observations
    common = np.where(a1b_mean.data == e1_mean.data)[0]
    observed = a1b_mean[common]

    # Plot the observed data
    qplt.plot(observed, coords=['time'], label='observed',
              color='black', lw=1.5)
    
    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title('North American mean air temperature', fontsize=18)
    
    plt.xlabel('Time / year')
    
    plt.grid()

    iplt.show()
Example #26
def main():
    fname = iris.sample_data_path('air_temp.pp')
    temperature = iris.load_cube(fname)

    # Plot #1: contourf with axes longitude from -180 to 180
    plt.figure(figsize=(12, 5))
    plt.subplot(121)
    qplt.contourf(temperature, 15)
    plt.gca().coastlines()

    # Plot #2: contourf with axes longitude from 0 to 360
    proj = ccrs.PlateCarree(central_longitude=-180.0)
    plt.subplot(122, projection=proj)
    qplt.contourf(temperature, 15)
    plt.gca().coastlines()
    iplt.show()
Example #27
def main():
    # Load the "total electron content" cube.
    filename = iris.sample_data_path('space_weather.nc')
    cube = iris.load_cube(filename, 'total electron content')

    # Explicitly mask negative electron content.
    cube.data = ma.masked_less(cube.data, 0)

    # Plot the cube using one hundred colour levels.
    qplt.contourf(cube, 100)
    plt.title('Total Electron Content')
    plt.xlabel('longitude / degrees')
    plt.ylabel('latitude / degrees')
    plt.gca().stock_img()
    plt.gca().coastlines()
    iplt.show()
Example #28
def main():
    fname = iris.sample_data_path('hybrid_height.nc')
    theta = iris.load_cube(fname)

    # Extract a single height vs longitude cross-section. N.B. This could
    # easily be changed to extract a specific slice, or even to loop over
    # *all* cross section slices.
    cross_section = next(theta.slices(['grid_longitude',
                                       'model_level_number']))

    qplt.contourf(cross_section, coords=['grid_longitude', 'altitude'])
    iplt.show()

    # Now do the equivalent plot, only against model level
    plt.figure()

    qplt.contourf(cross_section,
                  coords=['grid_longitude', 'model_level_number'])
    iplt.show()
Example #29
def main():
    fname = iris.sample_data_path('air_temp.pp')
    temperature = iris.load_cube(fname)

    # Plot #1: contourf with axes longitude from -180 to 180
    fig = plt.figure(figsize=(12, 5))
    plt.subplot(121)
    qplt.contourf(temperature, 15)
    plt.gca().coastlines()

    # Plot #2: contourf with axes longitude from 0 to 360
    proj = ccrs.PlateCarree(central_longitude=-180.0)
    ax = plt.subplot(122, projection=proj)
    qplt.contourf(temperature, 15)
    plt.gca().coastlines()
    iplt.show()
Example #30
def main():
    fname = iris.sample_data_path('hybrid_height.nc')
    theta = iris.load_cube(fname)

    # Extract a single height vs longitude cross-section. N.B. This could
    # easily be changed to extract a specific slice, or even to loop over
    # *all* cross section slices.
    cross_section = next(theta.slices(['grid_longitude',
                                       'model_level_number']))

    qplt.contourf(cross_section, coords=['grid_longitude', 'altitude'],
                  cmap='RdBu_r')
    iplt.show()

    # Now do the equivalent plot, only against model level
    plt.figure()

    qplt.contourf(cross_section,
                  coords=['grid_longitude', 'model_level_number'],
                  cmap='RdBu_r')
    iplt.show()
Example #31
def main():
    # Load data into three Cubes, one for each set of PP files
    e1 = iris.load_cube(iris.sample_data_path("E1_north_america.nc"))

    a1b = iris.load_cube(iris.sample_data_path("A1B_north_america.nc"))

    # Load in the global pre-industrial mean temperature, and limit the
    # domain to the same North American region that e1 and a1b are at.
    north_america = iris.Constraint(
        longitude=lambda v: 225 <= v <= 315,
        latitude=lambda v: 15 <= v <= 60,
    )
    pre_industrial = iris.load_cube(
        iris.sample_data_path("pre-industrial.pp"), north_america
    )

    pre_industrial_mean = pre_industrial.collapsed(
        ["latitude", "longitude"], iris.analysis.MEAN
    )
    e1_mean = e1.collapsed(["latitude", "longitude"], iris.analysis.MEAN)
    a1b_mean = a1b.collapsed(["latitude", "longitude"], iris.analysis.MEAN)

    # Show ticks 30 years apart
    plt.gca().xaxis.set_major_locator(mdates.YearLocator(30))

    # Label the ticks with year data
    plt.gca().format_xdata = mdates.DateFormatter("%Y")

    # Plot the datasets
    qplt.plot(e1_mean, coords=["time"], label="E1 scenario",
              lw=1.5, color="blue")
    qplt.plot(a1b_mean, coords=["time"], label="A1B-Image scenario",
              lw=1.5, color="red")

    # Draw a horizontal line showing the pre industrial mean
    plt.axhline(y=pre_industrial_mean.data, color="gray", linestyle="dashed",
                label="pre-industrial", lw=1.5)

    # Establish where a1b and e1 share the same data, i.e. the observations
    common = np.where(a1b_mean.data == e1_mean.data)[0]
    observed = a1b_mean[common]

    # Plot the observed data
    qplt.plot(observed, coords=["time"], label="observed", color="black", lw=1.5)

    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title("North American mean air temperature", fontsize=18)

    plt.xlabel("Time / year")

    plt.grid()

    iplt.show()
Example #32
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load the monthly-valued Southern Oscillation Index (SOI) time-series.
    fname = iris.sample_data_path('SOI_Darwin.nc')
    soi = iris.load_cube(fname)

    # Window length for filters.
    window = 121

    # Construct 2-year (24-month) and 7-year (84-month) low pass filters
    # for the SOI data which is monthly.
    wgts24 = low_pass_weights(window, 1. / 24.)
    wgts84 = low_pass_weights(window, 1. / 84.)

    # Apply each filter using the rolling_window method used with the weights
    # keyword argument. A weighted sum is required because the magnitude of
    # the weights are just as important as their relative sizes.
    soi24 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts24),
                               weights=wgts24)
    soi84 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts84),
                               weights=wgts84)

    # Plot the SOI time series and both filtered versions.
    plt.figure(figsize=(9, 4))
    iplt.plot(soi, color='0.7', linewidth=1., linestyle='-',
              alpha=1., label='no filter')
    iplt.plot(soi24, color='b', linewidth=2., linestyle='-',
              alpha=.7, label='2-year filter')
    iplt.plot(soi84, color='r', linewidth=2., linestyle='-',
              alpha=.7, label='7-year filter')
    plt.ylim([-4, 4])
    plt.title('Southern Oscillation Index (Darwin Only)')
    plt.xlabel('Time')
    plt.ylabel('SOI')
    plt.legend(fontsize=10)
    iplt.show()
Example #33
def main():
    fname = iris.sample_data_path("NAME_output.txt")

    boundary_volc_ash_constraint = iris.Constraint(
        "VOLCANIC_ASH_AIR_CONCENTRATION", flight_level="From FL000 - FL200")

    # Callback shown as None to illustrate where a cube-level callback function
    # would be used if required
    cube = iris.load_cube(fname, boundary_volc_ash_constraint, callback=None)

    # draw contour levels for the data (the top level is just a catch-all)
    levels = (0.0002, 0.002, 0.004, 1e10)
    cs = iplt.contourf(
        cube,
        levels=levels,
        colors=("#80ffff", "#939598", "#e00404"),
    )

    # draw a black outline at the lowest contour to highlight affected areas
    iplt.contour(cube, levels=(levels[0], 100), colors="black")

    # set an extent and a background image for the map
    ax = plt.gca()
    ax.set_extent((-90, 20, 20, 75))
    ax.stock_img("ne_shaded")

    # make a legend, with custom labels, for the coloured contour set
    artists, _ = cs.legend_elements()
    labels = [
        r"$%s < x \leq %s$" % (levels[0], levels[1]),
        r"$%s < x \leq %s$" % (levels[1], levels[2]),
        r"$x > %s$" % levels[2],
    ]
    ax.legend(artists,
              labels,
              title="Ash concentration / g m-3",
              loc="upper left")

    time = cube.coord("time")
    time_date = time.units.num2date(time.points[0]).strftime(UTC_format)
    plt.title("Volcanic ash concentration forecast\nvalid at %s" % time_date)

    iplt.show()
Example #34
def visualization(cube_regrid):
    # support NetCDF
    iris.FUTURE.netcdf_promote = True
    print(cube_regrid)
    fig2 = plt.figure()
    fig2.suptitle('Oceanic Meridional Energy Transport in 1993 (GLORYS2V3)')
    # Set up axes and title
    #ax = plt.subplot(projection=ccrs.PlateCarree())
    ax = plt.axes(projection=ccrs.PlateCarree())
    # Set limits
    ax.set_global()
    # Draw coastlines
    ax.coastlines()
    # set gridlines and ticks
    gl = ax.gridlines(crs=ccrs.PlateCarree(),
                      draw_labels=True,
                      linewidth=1,
                      color='gray',
                      alpha=0.5,
                      linestyle='--')
    gl.xlabels_top = False
    gl.xlabel_style = {'size': 11, 'color': 'gray'}
    #gl.xlines = False
    #gl.set_xticks()
    #gl.set_yticks()
    gl.xformatter = LONGITUDE_FORMATTER
    gl.ylabel_style = {'size': 11, 'color': 'gray'}
    #ax.ylabels_left = False
    gl.yformatter = LATITUDE_FORMATTER
    # plot with Iris quickplot pcolormesh
    cs = iplt.pcolormesh(cube_regrid / 1000,
                         cmap='coolwarm',
                         vmin=-0.5,
                         vmax=0.5)
    # Add a citation to the plot.
    #iplt.citation(iris.plot.BREWER_CITE)
    cbar = fig2.colorbar(cs,
                         extend='both',
                         orientation='horizontal',
                         shrink=1.0)
    cbar.set_label('PW (1E+15W)')
    iplt.show()
    fig2.savefig(output_path + os.sep + 'OMET_GLORYS2V3.jpg', dpi=500)
Example #35
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load the "total electron content" cube.
    filename = iris.sample_data_path('space_weather.nc')
    cube = iris.load_cube(filename, 'total electron content')

    # Explicitly mask negative electron content.
    cube.data = ma.masked_less(cube.data, 0)

    # Plot the cube using one hundred colour levels.
    qplt.contourf(cube, 100)
    plt.title('Total Electron Content')
    plt.xlabel('longitude / degrees')
    plt.ylabel('latitude / degrees')
    plt.gca().stock_img()
    plt.gca().coastlines()
    iplt.show()
Example #36
def visualization(cube_regrid, year):
    print("Visualize the data on PlateCarree map!")
    logging.info("Visualize the data on PlateCarree map!")
    # support NetCDF
    iris.FUTURE.netcdf_promote = True
    print(cube_regrid)
    fig2 = plt.figure()
    fig2.suptitle('Oceanic Meridional Energy Transport in %d (ORAS4)' % (year))
    # Set up axes and title
    ax = plt.axes(projection=ccrs.PlateCarree())
    # Set limits
    ax.set_global()
    # Draw coastlines
    ax.coastlines()
    # set gridlines and ticks
    gl = ax.gridlines(crs=ccrs.PlateCarree(),
                      draw_labels=True,
                      linewidth=1,
                      color='gray',
                      alpha=0.5,
                      linestyle='--')
    gl.xlabels_top = False
    gl.xlabel_style = {'size': 11, 'color': 'gray'}
    #gl.xlines = False
    #gl.set_xticks()
    #gl.set_yticks()
    gl.xformatter = LONGITUDE_FORMATTER
    gl.ylabel_style = {'size': 11, 'color': 'gray'}
    #ax.ylabels_left = False
    gl.yformatter = LATITUDE_FORMATTER
    # plot with Iris quickplot pcolormesh
    cs = iplt.pcolormesh(cube_regrid, cmap='coolwarm', vmin=-0.7, vmax=0.7)
    cbar = fig2.colorbar(cs,
                         extend='both',
                         orientation='horizontal',
                         shrink=1.0)
    cbar.set_label('PW (1E+15W)')
    iplt.show()
    fig2.savefig(output_path + os.sep + 'lat-lon' + os.sep +
                 'OMET_ORAS4_lat-lon_%d.png' % (year),
                 dpi=500)
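The two visualization() functions above (Examples #34 and #36) reference several module-level names the listing omits. A sketch of the assumed setup; output_path is a hypothetical placeholder:

import os
import logging

import cartopy.crs as ccrs
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER

# Hypothetical output directory; the original scripts define their own.
output_path = '/path/to/output'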
Example #37
def main():

    # Absolute path to local CMIP5 data (not in the Iris sample-data set),
    # so load it directly rather than via iris.sample_data_path().
    file_path = ('/nfs/a266/data/CMIP5_AFRICA/BC_0.5x0.5/IPSL-CM5A-LR/'
                 'historical/pr_WFDEI_1979-2013_0.5x0.5_day_IPSL-CM5A-LR_'
                 'africa_historical_r1i1p1_full.nc')

    cube = iris.load_cube(file_path)

    cube_wafr = cube.intersection(latitude=(-10.0, 10.0),
                                  longitude=(4.0, 25.0))

    iris.coord_categorisation.add_year(cube_wafr, 'time', name='year')

    iris.coord_categorisation.add_month_number(cube_wafr,
                                               'time',
                                               name='month_number')

    iris.coord_categorisation.add_season(cube_wafr, 'time', name='season')

    SPELL_COUNT = Aggregator('spell_count',
                             count_spells,
                             units_func=lambda units: 1)

    threshold_rainfall = 0.1

    spell_days = 10

    dry_periods = cube_wafr.collapsed('time',
                                      SPELL_COUNT,
                                      threshold=threshold_rainfall,
                                      spell_length=spell_days)

    dry_periods.rename('Number of 10-days dry spells in 35 years')

    qplt.contourf(dry_periods, cmap='RdYlBu_r')

    plt.gca().coastlines()

    iplt.show()
Example #38
def main():
    # Load the three files of sample NEMO data.
    fname = iris.sample_data_path("NEMO/nemo_1m_*.nc")
    cubes = iris.load(fname)

    # Some attributes are unique to each file and must be blanked
    # to allow concatenation.
    differing_attrs = ["file_name", "name", "timeStamp", "TimeStamp"]
    for cube in cubes:
        for attribute in differing_attrs:
            cube.attributes[attribute] = ""

    # The cubes still cannot be concatenated because their time dimension is
    # time_counter rather than time. time needs to be promoted to allow
    # concatenation.
    for cube in cubes:
        promote_aux_coord_to_dim_coord(cube, "time")

    # The cubes can now be concatenated into a single time series.
    cube = cubes.concatenate_cube()

    # Generate a time series plot of a single point
    plt.figure()
    y_point_index = 100
    x_point_index = 100
    qplt.plot(cube[:, y_point_index, x_point_index], "o-")

    # Include the point's position in the plot's title
    lat_point = cube.coord("latitude").points[y_point_index, x_point_index]
    lat_string = "{:.3f}\u00B0 {}".format(abs(lat_point),
                                          "N" if lat_point > 0.0 else "S")
    lon_point = cube.coord("longitude").points[y_point_index, x_point_index]
    lon_string = "{:.3f}\u00B0 {}".format(abs(lon_point),
                                          "E" if lon_point > 0.0 else "W")
    plt.title("{} at {} {}".format(cube.long_name.capitalize(), lat_string,
                                   lon_string))

    iplt.show()
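promote_aux_coord_to_dim_coord is called as a bare name in Examples #38 and #40; it lives in iris.util, so the scripts presumably import it like this:

from iris.util import promote_aux_coord_to_dim_coord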
Example #39
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load data
    filepath = iris.sample_data_path('orca2_votemper.nc')
    cube = iris.load_cube(filepath)

    # Choose plot projections
    projections = {}
    projections['Mollweide'] = ccrs.Mollweide()
    projections['PlateCarree'] = ccrs.PlateCarree()
    projections['NorthPolarStereo'] = ccrs.NorthPolarStereo()
    projections['Orthographic'] = ccrs.Orthographic(central_longitude=-90,
                                                    central_latitude=45)

    pcarree = projections['PlateCarree']
    # Transform cube to target projection
    new_cube, extent = iris.analysis.cartography.project(cube,
                                                         pcarree,
                                                         nx=400,
                                                         ny=200)

    # Plot data in each projection
    for name in sorted(projections):
        fig = plt.figure()
        fig.suptitle('ORCA2 Data Projected to {}'.format(name))
        # Set up axes and title
        ax = plt.subplot(projection=projections[name])
        # Set limits
        ax.set_global()
        # plot with Iris quickplot pcolormesh
        qplt.pcolormesh(new_cube)
        # Draw coastlines
        ax.coastlines()

        iplt.show()
Example #40
def main():
    # Load the three files of sample NEMO data.
    fname = iris.sample_data_path('NEMO/nemo_1m_*.nc')
    cubes = iris.load(fname)

    # Some attributes are unique to each file and must be blanked
    # to allow concatenation.
    differing_attrs = ['file_name', 'name', 'timeStamp', 'TimeStamp']
    for cube in cubes:
        for attribute in differing_attrs:
            cube.attributes[attribute] = ''

    # The cubes still cannot be concatenated because their time dimension is
    # time_counter rather than time. time needs to be promoted to allow
    # concatenation.
    for cube in cubes:
        promote_aux_coord_to_dim_coord(cube, 'time')

    # The cubes can now be concatenated into a single time series.
    cube = cubes.concatenate_cube()

    # Generate a time series plot of a single point
    plt.figure()
    y_point_index = 100
    x_point_index = 100
    qplt.plot(cube[:, y_point_index, x_point_index], 'o-')

    # Include the point's position in the plot's title
    lat_point = cube.coord('latitude').points[y_point_index, x_point_index]
    lat_string = '{:.3f}\u00B0 {}'.format(abs(lat_point),
                                          'N' if lat_point > 0. else 'S')
    lon_point = cube.coord('longitude').points[y_point_index, x_point_index]
    lon_string = '{:.3f}\u00B0 {}'.format(abs(lon_point),
                                          'E' if lon_point > 0. else 'W')
    plt.title('{} at {} {}'.format(cube.long_name.capitalize(), lat_string,
                                   lon_string))
    iplt.show()
Example #41
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load some test data.
    fname = iris.sample_data_path('rotated_pole.nc')
    air_pressure = iris.load_cube(fname)

    # Plot #1: Point plot showing data values & a colorbar
    plt.figure()
    points = qplt.points(air_pressure, c=air_pressure.data)
    cb = plt.colorbar(points, orientation='horizontal')
    cb.set_label(air_pressure.units)
    plt.gca().coastlines()
    iplt.show()

    # Plot #2: Contourf of the point based data
    plt.figure()
    qplt.contourf(air_pressure, 15)
    plt.gca().coastlines()
    iplt.show()

    # Plot #3: Contourf overlaid by coloured point data
    plt.figure()
    qplt.contourf(air_pressure)
    iplt.points(air_pressure, c=air_pressure.data)
    plt.gca().coastlines()
    iplt.show()

    # For the purposes of this example, add some bounds to the latitude
    # and longitude
    air_pressure.coord('grid_latitude').guess_bounds()
    air_pressure.coord('grid_longitude').guess_bounds()

    # Plot #4: Block plot
    plt.figure()
    plt.axes(projection=ccrs.PlateCarree())
    iplt.pcolormesh(air_pressure)
    plt.gca().stock_img()
    plt.gca().coastlines()
    iplt.show()
Example #42
def main():
    # Load some test data.
    fname = iris.sample_data_path("rotated_pole.nc")
    air_pressure = iris.load_cube(fname)

    # Plot #1: Point plot showing data values & a colorbar
    plt.figure()
    points = qplt.points(air_pressure, c=air_pressure.data)
    cb = plt.colorbar(points, orientation="horizontal")
    cb.set_label(air_pressure.units)
    plt.gca().coastlines()
    iplt.show()

    # Plot #2: Contourf of the point based data
    plt.figure()
    qplt.contourf(air_pressure, 15)
    plt.gca().coastlines()
    iplt.show()

    # Plot #3: Contourf overlaid by coloured point data
    plt.figure()
    qplt.contourf(air_pressure)
    iplt.points(air_pressure, c=air_pressure.data)
    plt.gca().coastlines()
    iplt.show()

    # For the purposes of this example, add some bounds to the latitude
    # and longitude
    air_pressure.coord("grid_latitude").guess_bounds()
    air_pressure.coord("grid_longitude").guess_bounds()

    # Plot #4: Block plot
    plt.figure()
    plt.axes(projection=ccrs.PlateCarree())
    iplt.pcolormesh(air_pressure)
    plt.gca().stock_img()
    plt.gca().coastlines()
    iplt.show()
Example #43
def main():
    # Extract surface temperature cubes which have an ensemble member
    # coordinate, adding appropriate lagged ensemble metadata.
    surface_temp = iris.load_cube(
        iris.sample_data_path('GloSea4', 'ensemble_???.pp'),
        iris.Constraint('surface_temperature',
                        realization=lambda value: True),
        callback=realization_metadata,
    )

    # ----------------------------------------------------------------------------------------------------------------
    # Plot #1: Ensemble postage stamps
    # ----------------------------------------------------------------------------------------------------------------

    # For the purposes of this example, take the last time element of the
    # cube.
    last_timestep = surface_temp[:, -1, :, :]

    # Make 50 evenly spaced levels which span the dataset.
    contour_levels = np.linspace(np.min(last_timestep.data),
                                 np.max(last_timestep.data), 50)

    # Create a wider than normal figure to support our many plots.
    plt.figure(figsize=(12, 6), dpi=100)

    # Also manually adjust the spacings which are used when creating subplots.
    plt.gcf().subplots_adjust(hspace=0.05, wspace=0.05, top=0.95,
                              bottom=0.05, left=0.075, right=0.925)
    
    # Iterate over all possible latitude / longitude slices.
    for cube in last_timestep.slices(['latitude', 'longitude']):

        # Get the ensemble member number from the ensemble coordinate.
        ens_member = cube.coord('realization').points[0]

        # Plot the data in a 4x4 grid, with each plot's position in the grid
        # being determined by ensemble member number. The special case for
        # the 13th ensemble member is to have the plot at the bottom right.
        if ens_member == 13:
            plt.subplot(4, 4, 16)
        else:
            plt.subplot(4, 4, ens_member + 1)
    
        cf = iplt.contourf(cube, contour_levels)
    
        # add coastlines
        plt.gca().coastlines()
    
    # Make an axes to put the shared colorbar in.
    colorbar_axes = plt.gcf().add_axes([0.35, 0.1, 0.3, 0.05])
    colorbar = plt.colorbar(cf, colorbar_axes, orientation='horizontal')
    colorbar.set_label('%s' % last_timestep.units)

    # Limit the colorbar to 8 tick marks.
    import matplotlib.ticker
    colorbar.locator = matplotlib.ticker.MaxNLocator(8)
    colorbar.update_ticks()

    # Get the time for the entire plot.
    time_coord = last_timestep.coord('time')
    time = time_coord.units.num2date(time_coord.points[0])

    # Set a global title for the postage stamps with the date formatted by
    # "monthname year".
    plt.suptitle('Surface temperature ensemble forecasts for %s' %
                 time.strftime('%B %Y'))

    iplt.show()


    # ---------------------------------------------------------------------------------------------------------------- 
    # Plot #2: ENSO plumes
    # ----------------------------------------------------------------------------------------------------------------
    
    # Nino 3.4 lies between: 170W and 120W, 5N and 5S, so define a
    # constraint which matches this.
    nino_3_4_constraint = iris.Constraint(
        longitude=lambda v: -170 + 360 <= v <= -120 + 360,
        latitude=lambda v: -5 <= v <= 5)

    nino_cube = surface_temp.extract(nino_3_4_constraint)

    # Subsetting a circular longitude coordinate always results in a circular
    # coordinate, so set the coordinate to be non-circular.
    nino_cube.coord('longitude').circular = False
    
    # Calculate the horizontal mean for the nino region
    mean = nino_cube.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    
    # Calculate the ensemble mean of the horizontal mean. To do this, remove
    # the "forecast_period" and "forecast_reference_time" coordinates which
    # span both "realization" and "time".
    mean.remove_coord("forecast_reference_time")
    mean.remove_coord("forecast_period")
    ensemble_mean = mean.collapsed('realization', iris.analysis.MEAN)
    
    # take the ensemble mean from each ensemble member
    mean -= ensemble_mean.data

    plt.figure()
    
    for ensemble_member in mean.slices(['time']):
        # Draw each ensemble member as a dashed line in black.
        iplt.plot(ensemble_member, '--k')

    plt.title('Mean temperature anomaly for ENSO 3.4 region')
    plt.xlabel('Time')
    plt.ylabel('Temperature anomaly / K')

    iplt.show()
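Example #43 loads with callback=realization_metadata, but the callback is not shown. A sketch consistent with the Iris GloSea4 ensemble gallery example, assuming numpy as np; it derives the ensemble member number from the *_???.pp filename:

import iris.coords


def realization_metadata(cube, field, fname):
    # Add an ensemble-member ("realization") coordinate if one is missing.
    if not cube.coords('realization'):
        # The member number is encoded in the filename as ensemble_???.pp.
        realization_number = fname[-6:-3]
        realization_coord = iris.coords.AuxCoord(
            np.int32(realization_number), 'realization')
        cube.add_aux_coord(realization_coord)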
Example #44
def main():
    # Load data into three Cubes, one for each set of NetCDF files.
    e1 = iris.load_cube(iris.sample_data_path('E1_north_america.nc'))

    a1b = iris.load_cube(iris.sample_data_path('A1B_north_america.nc'))

    # load in the global pre-industrial mean temperature, and limit the domain
    # to the same North American region that e1 and a1b are at.
    north_america = iris.Constraint(longitude=lambda v: 225 <= v <= 315,
                                    latitude=lambda v: 15 <= v <= 60)
    pre_industrial = iris.load_cube(iris.sample_data_path('pre-industrial.pp'),
                                    north_america)

    # Generate area-weights array. As e1 and a1b are on the same grid we can
    # do this just once and re-use. This method requires bounds on lat/lon
    # coords, so let's add some in sensible locations using the "guess_bounds"
    # method.
    e1.coord('latitude').guess_bounds()
    e1.coord('longitude').guess_bounds()
    e1_grid_areas = iris.analysis.cartography.area_weights(e1)
    pre_industrial.coord('latitude').guess_bounds()
    pre_industrial.coord('longitude').guess_bounds()
    pre_grid_areas = iris.analysis.cartography.area_weights(pre_industrial)

    # Perform the area-weighted mean for each of the datasets using the
    # computed grid-box areas.
    pre_industrial_mean = pre_industrial.collapsed(['latitude', 'longitude'],
                                                   iris.analysis.MEAN,
                                                   weights=pre_grid_areas)
    e1_mean = e1.collapsed(['latitude', 'longitude'],
                           iris.analysis.MEAN,
                           weights=e1_grid_areas)
    a1b_mean = a1b.collapsed(['latitude', 'longitude'],
                             iris.analysis.MEAN,
                             weights=e1_grid_areas)

    # Show ticks 30 years apart
    plt.gca().xaxis.set_major_locator(mdates.YearLocator(30))

    # Plot the datasets
    qplt.plot(e1_mean, label='E1 scenario', lw=1.5, color='blue')
    qplt.plot(a1b_mean, label='A1B-Image scenario', lw=1.5, color='red')

    # Draw a horizontal line showing the pre-industrial mean
    plt.axhline(y=pre_industrial_mean.data,
                color='gray',
                linestyle='dashed',
                label='pre-industrial',
                lw=1.5)

    # Constrain the period 1860-1999 and extract the observed data from a1b
    constraint = iris.Constraint(
        time=lambda cell: 1860 <= cell.point.year <= 1999)
    with iris.FUTURE.context(cell_datetime_objects=True):
        observed = a1b_mean.extract(constraint)
        # Assert that this data set is the same as the e1 scenario:
        # they share data up to the 1999 cut off.
        assert np.all(
            np.isclose(observed.data,
                       e1_mean.extract(constraint).data))

    # Plot the observed data
    qplt.plot(observed, label='observed', color='black', lw=1.5)

    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title('North American mean air temperature', fontsize=18)

    plt.xlabel('Time / year')

    plt.grid()

    iplt.show()
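The guess_bounds technique used above is worth seeing in isolation: for a regularly spaced coordinate it places each cell boundary halfway between neighbouring points, extrapolating at the ends. A minimal sketch with a toy latitude coordinate:

import numpy as np
import iris.coords

# A toy latitude coordinate with points but no bounds.
lat = iris.coords.DimCoord(np.array([0.0, 10.0, 20.0]),
                           standard_name='latitude', units='degrees')
lat.guess_bounds()
print(lat.bounds)
# [[-5.  5.]
#  [ 5. 15.]
#  [15. 25.]] -- area_weights() needs these cell bounds to compute areas.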
Example No. 45
def main():
    # Load data into three Cubes, one for each set of NetCDF files.
    e1 = iris.load_cube(iris.sample_data_path('E1_north_america.nc'))

    a1b = iris.load_cube(iris.sample_data_path('A1B_north_america.nc'))

    # Load in the global pre-industrial mean temperature, and limit the
    # domain to the same North American region that e1 and a1b cover.
    north_america = iris.Constraint(longitude=lambda v: 225 <= v <= 315,
                                    latitude=lambda v: 15 <= v <= 60)
    pre_industrial = iris.load_cube(iris.sample_data_path('pre-industrial.pp'),
                                    north_america)

    # Generate area-weights array. As e1 and a1b are on the same grid we can
    # do this just once and re-use. This method requires bounds on lat/lon
    # coords, so let's add some in sensible locations using the "guess_bounds"
    # method.
    e1.coord('latitude').guess_bounds()
    e1.coord('longitude').guess_bounds()
    e1_grid_areas = iris.analysis.cartography.area_weights(e1)
    pre_industrial.coord('latitude').guess_bounds()
    pre_industrial.coord('longitude').guess_bounds()
    pre_grid_areas = iris.analysis.cartography.area_weights(pre_industrial)

    # Perform the area-weighted mean for each of the datasets using the
    # computed grid-box areas.
    pre_industrial_mean = pre_industrial.collapsed(['latitude', 'longitude'],
                                                   iris.analysis.MEAN,
                                                   weights=pre_grid_areas)
    e1_mean = e1.collapsed(['latitude', 'longitude'],
                           iris.analysis.MEAN,
                           weights=e1_grid_areas)
    a1b_mean = a1b.collapsed(['latitude', 'longitude'],
                             iris.analysis.MEAN,
                             weights=e1_grid_areas)

    # Show ticks 30 years apart
    plt.gca().xaxis.set_major_locator(mdates.YearLocator(30))

    # Plot the datasets
    qplt.plot(e1_mean, label='E1 scenario', lw=1.5, color='blue')
    qplt.plot(a1b_mean, label='A1B-Image scenario', lw=1.5, color='red')

    # Draw a horizontal line showing the pre-industrial mean
    plt.axhline(y=pre_industrial_mean.data, color='gray', linestyle='dashed',
                label='pre-industrial', lw=1.5)

    # Establish where a1b_mean and e1_mean share the same data (i.e. the
    # observations): slice up to the first index at which they diverge
    observed = a1b_mean[:np.argmin(np.isclose(a1b_mean.data, e1_mean.data))]

    # Plot the observed data
    qplt.plot(observed, label='observed', color='black', lw=1.5)

    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title('North American mean air temperature', fontsize=18)

    plt.xlabel('Time / year')

    plt.grid()

    iplt.show()
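The one-liner above finds the first index at which the two series diverge. A toy illustration of the same trick, including its one caveat:

import numpy as np

a = np.array([1.0, 2.0, 3.0, 4.0])
b = np.array([1.0, 2.0, 9.0, 8.0])

# np.isclose gives [True, True, False, False]; argmin returns the index of
# the first False, i.e. the first point at which the series diverge.
split = np.argmin(np.isclose(a, b))
print(a[:split])  # [1. 2.] -- the shared ("observed") part
# Caveat: if the series never diverge, argmin returns 0 and the slice is empty.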
Example No. 46
def main():
    # Load e1 and a1b using the callback to update the metadata (a sketch of
    # cop_metadata_callback follows this example)
    e1 = iris.load_cube(iris.sample_data_path('E1.2098.pp'),
                        callback=cop_metadata_callback)
    a1b = iris.load_cube(iris.sample_data_path('A1B.2098.pp'),
                         callback=cop_metadata_callback)

    # Load the global average (pre-industrial) data
    global_avg = iris.load_cube(iris.sample_data_path('pre-industrial.pp'))

    # Define evenly spaced contour levels: -2.5, -1.5, ... 15.5, 16.5 with the
    # specific colours
    levels = np.arange(20) - 2.5
    red = np.array([0, 0, 221, 239, 229, 217, 239, 234, 228, 222, 205, 196,
                    161, 137, 116, 89, 77, 60, 51]) / 256.
    green = np.array([16, 217, 242, 243, 235, 225, 190, 160, 128, 87, 72, 59,
                      33, 21, 29, 30, 30, 29, 26]) / 256.
    blue = np.array([255, 255, 243, 169, 99, 51, 63, 37, 39, 21, 27, 23, 22,
                     26, 29, 28, 27, 25, 22]) / 256.

    # Put those colours into an array which can be passed to contourf as the
    # specific colours for each level
    colors = np.array([red, green, blue]).T

    # Subtract the global average from each scenario below to give the
    # temperature anomalies.

    # Iterate over each latitude longitude slice for both e1 and a1b scenarios
    # simultaneously
    for e1_slice, a1b_slice in zip(e1.slices(['latitude', 'longitude']),
                                   a1b.slices(['latitude', 'longitude'])):

        time_coord = a1b_slice.coord('time')

        # Calculate the difference from the mean
        delta_e1 = e1_slice - global_avg
        delta_a1b = a1b_slice - global_avg

        # Make a wider than normal figure to house two maps side-by-side
        fig = plt.figure(figsize=(12, 5))

        # Get the time datetime from the coordinate
        time = time_coord.units.num2date(time_coord.points[0])
        # Set a title for the entire figure, giving the forecast year. Also,
        # set the y value for the title so that it is not tight to the top
        # of the plot.
        fig.suptitle(
            'Annual Temperature Predictions for ' + time.strftime("%Y"),
            y=0.9,
            fontsize=18)

        # Add the first subplot showing the E1 scenario
        plt.subplot(121)
        plt.title('HadGEM2 E1 Scenario', fontsize=10)
        iplt.contourf(delta_e1, levels, colors=colors, extend='both')
        plt.gca().coastlines()
        # get the current axes' subplot for use later on
        plt1_ax = plt.gca()

        # Add the second subplot showing the A1B scenario
        plt.subplot(122)
        plt.title('HadGEM2 A1B-Image Scenario', fontsize=10)
        contour_result = iplt.contourf(delta_a1b, levels, colors=colors,
                                       extend='both')
        plt.gca().coastlines()
        # get the current axes' subplot for use later on
        plt2_ax = plt.gca()

        # Now add a colourbar whose leftmost point is the same as the leftmost
        # point of the left hand plot and whose rightmost point is the
        # rightmost point of the right hand plot

        # Get the positions of the 2nd plot and the left position of the 1st
        # plot
        left, bottom, width, height = plt2_ax.get_position().bounds
        first_plot_left = plt1_ax.get_position().bounds[0]

        # The width of the colorbar is then simple to compute
        width = left - first_plot_left + width

        # Add axes to the figure, to place the colour bar
        colorbar_axes = fig.add_axes([first_plot_left, bottom + 0.07,
                                      width, 0.03])

        # Add the colour bar
        cbar = plt.colorbar(contour_result, colorbar_axes,
                            orientation='horizontal')

        # Label the colour bar and add ticks
        cbar.set_label(e1_slice.units)
        cbar.ax.tick_params(length=0)

        iplt.show()
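This example (and Example No. 48 below) depends on a cop_metadata_callback that the excerpt does not include. Judging from how it is used, it is a load callback that tags each cube with an 'Experiment' coordinate derived from the filename; a plausible sketch, whose label-extraction logic is a guess, might be:

import os
import iris.coords

def cop_metadata_callback(cube, field, filename):
    # Sketch: derive an experiment label (e.g. 'E1' or 'A1B') from the
    # filename and attach it to the cube as a scalar auxiliary coordinate.
    experiment_label = os.path.basename(filename).split('.')[0]
    exp_coord = iris.coords.AuxCoord(experiment_label,
                                     long_name='Experiment',
                                     units='no_unit')
    cube.add_aux_coord(exp_coord)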
Example No. 47
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load a sample air-temperature sequence.
    file_path = iris.sample_data_path('E1_north_america.nc')
    temperatures = iris.load_cube(file_path)

    # Create a year-number coordinate from the time information.
    iris.coord_categorisation.add_year(temperatures, 'time')

    # Create a sample anomaly field for one chosen year, by extracting that
    # year and subtracting the time mean.
    sample_year = 1982
    year_temperature = temperatures.extract(iris.Constraint(year=sample_year))
    time_mean = temperatures.collapsed('time', iris.analysis.MEAN)
    anomaly = year_temperature - time_mean

    # Construct a plot title string explaining which years are involved.
    years = temperatures.coord('year').points
    plot_title = 'Temperature anomaly'
    plot_title += '\n{} differences from {}-{} average.'.format(
        sample_year, years[0], years[-1])

    # Define scaling levels for the logarithmic colouring.
    minimum_log_level = 0.1
    maximum_scale_level = 3.0

    # Use a standard colour map which varies blue-white-red.
    # For suitable options, see the 'Diverging colormaps' section in:
    # http://matplotlib.org/examples/color/colormaps_reference.html
    anom_cmap = 'bwr'

    # Create a 'logarithmic' data normalization.
    anom_norm = mcols.SymLogNorm(linthresh=minimum_log_level,
                                 linscale=0,
                                 vmin=-maximum_scale_level,
                                 vmax=maximum_scale_level)
    # Setting "linthresh=minimum_log_level" makes its non-logarithmic
    # data range equal to our 'zero band'.
    # Setting "linscale=0" maps the whole zero band to the middle colour value
    # (i.e. 0.5), which is the neutral point of a "diverging" style colormap.

    # Create an Axes, specifying the map projection.
    plt.axes(projection=ccrs.LambertConformal())

    # Make a pseudocolour plot using this colour scheme.
    mesh = iplt.pcolormesh(anomaly, cmap=anom_cmap, norm=anom_norm)

    # Add a colourbar, with extensions to show handling of out-of-range values.
    bar = plt.colorbar(mesh, orientation='horizontal', extend='both')

    # Set some suitable fixed "logarithmic" colourbar tick positions.
    tick_levels = [-3, -1, -0.3, 0.0, 0.3, 1, 3]
    bar.set_ticks(tick_levels)

    # Modify the tick labels so that the centre one shows "+/-<minimum-level>".
    tick_levels[3] = r'$\pm${:g}'.format(minimum_log_level)
    bar.set_ticklabels(tick_levels)

    # Label the colourbar to show the units.
    bar.set_label('[{}, log scale]'.format(anomaly.units))

    # Add coastlines and a title.
    plt.gca().coastlines()
    plt.title(plot_title)

    # Display the result.
    iplt.show()
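The effect of linthresh and linscale=0 can be checked numerically: everything inside the +/-0.1 "zero band" maps to the colormap midpoint, while the two logarithmic regions fill the rest of the range. A quick sketch (newer matplotlib also expects an explicit base argument, assumed here):

import numpy as np
import matplotlib.colors as mcols

norm = mcols.SymLogNorm(linthresh=0.1, linscale=0,
                        vmin=-3.0, vmax=3.0, base=10)
print(norm(np.array([-3.0, -0.1, 0.0, 0.1, 3.0])))
# -> approximately [0.0, 0.5, 0.5, 0.5, 1.0]: the whole zero band collapses
#    onto 0.5, the neutral colour of a diverging colormap.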
Example No. 48
def main():
    # Load e1 and a1b using the callback to update the metadata (see the
    # cop_metadata_callback sketch after Example No. 46)
    e1 = iris.load_cube(iris.sample_data_path('E1.2098.pp'),
                        callback=cop_metadata_callback)
    a1b = iris.load_cube(iris.sample_data_path('A1B.2098.pp'),
                         callback=cop_metadata_callback)

    # Load the global average (pre-industrial) data
    global_avg = iris.load_cube(iris.sample_data_path('pre-industrial.pp'))

    # Define evenly spaced contour levels: -2.5, -1.5, ... 15.5, 16.5 with the
    # specific colours
    levels = np.arange(20) - 2.5
    red = np.array([
        0, 0, 221, 239, 229, 217, 239, 234, 228, 222, 205, 196, 161, 137, 116,
        89, 77, 60, 51
    ]) / 256.
    green = np.array([
        16, 217, 242, 243, 235, 225, 190, 160, 128, 87, 72, 59, 33, 21, 29, 30,
        30, 29, 26
    ]) / 256.
    blue = np.array([
        255, 255, 243, 169, 99, 51, 63, 37, 39, 21, 27, 23, 22, 26, 29, 28, 27,
        25, 22
    ]) / 256.

    # Put those colours into an array which can be passed to contourf as the
    # specific colours for each level
    colors = np.array([red, green, blue]).T

    # Subtract the global average from each scenario below to give the
    # temperature anomalies.

    # Iterate over each latitude longitude slice for both e1 and a1b scenarios
    # simultaneously
    for e1_slice, a1b_slice in zip(e1.slices(['latitude', 'longitude']),
                                   a1b.slices(['latitude', 'longitude'])):

        time_coord = a1b_slice.coord('time')

        # Calculate the difference from the mean
        delta_e1 = e1_slice - global_avg
        delta_a1b = a1b_slice - global_avg

        # Make a wider than normal figure to house two maps side-by-side
        fig = plt.figure(figsize=(12, 5))

        # Get the time datetime from the coordinate
        time = time_coord.units.num2date(time_coord.points[0])
        # Set a title for the entire figure, giving the forecast year. Also,
        # set the y value for the title so that it is not tight to the top
        # of the plot.
        fig.suptitle('Annual Temperature Predictions for ' +
                     time.strftime("%Y"),
                     y=0.9,
                     fontsize=18)

        # Add the first subplot showing the E1 scenario
        plt.subplot(121)
        plt.title('HadGEM2 E1 Scenario', fontsize=10)
        iplt.contourf(delta_e1, levels, colors=colors, extend='both')
        plt.gca().coastlines()
        # get the current axes' subplot for use later on
        plt1_ax = plt.gca()

        # Add the second subplot showing the A1B scenario
        plt.subplot(122)
        plt.title('HadGEM2 A1B-Image Scenario', fontsize=10)
        contour_result = iplt.contourf(delta_a1b, levels, colors=colors,
                                       extend='both')
        plt.gca().coastlines()
        # get the current axes' subplot for use later on
        plt2_ax = plt.gca()

        # Now add a colourbar whose leftmost point is the same as the leftmost
        # point of the left hand plot and whose rightmost point is the
        # rightmost point of the right hand plot

        # Get the positions of the 2nd plot and the left position of the 1st
        # plot
        left, bottom, width, height = plt2_ax.get_position().bounds
        first_plot_left = plt1_ax.get_position().bounds[0]

        # The width of the colorbar is then simple to compute
        width = left - first_plot_left + width

        # Add axes to the figure, to place the colour bar
        colorbar_axes = fig.add_axes(
            [first_plot_left, bottom + 0.07, width, 0.03])

        # Add the colour bar
        cbar = plt.colorbar(contour_result,
                            colorbar_axes,
                            orientation='horizontal')

        # Label the colour bar and add ticks
        cbar.set_label(e1_slice.units)
        cbar.ax.tick_params(length=0)

        iplt.show()
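Computing the colourbar axes by hand, as above, works but is fiddly. With a reasonably recent matplotlib, the same "span both subplots" effect can be achieved by handing both Axes to Figure.colorbar, which steals space from them automatically. A sketch of the alternative, reusing fig, contour_result, plt1_ax and plt2_ax from the loop above:

        # Sketch: let matplotlib place a colourbar spanning both subplots.
        cbar = fig.colorbar(contour_result, ax=[plt1_ax, plt2_ax],
                            orientation='horizontal', fraction=0.05)
        cbar.set_label(e1_slice.units)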
Example No. 49
def main():
    # Load data into three Cubes, one for each set of NetCDF files.
    e1 = iris.load_cube(iris.sample_data_path('E1_north_america.nc'))

    a1b = iris.load_cube(iris.sample_data_path('A1B_north_america.nc'))

    # Load in the global pre-industrial mean temperature, and limit the
    # domain to the same North American region that e1 and a1b cover.
    north_america = iris.Constraint(longitude=lambda v: 225 <= v <= 315,
                                    latitude=lambda v: 15 <= v <= 60)
    pre_industrial = iris.load_cube(iris.sample_data_path('pre-industrial.pp'),
                                    north_america)

    # Generate area-weights array. As e1 and a1b are on the same grid we can
    # do this just once and re-use. This method requires bounds on lat/lon
    # coords, so let's add some in sensible locations using the "guess_bounds"
    # method.
    e1.coord('latitude').guess_bounds()
    e1.coord('longitude').guess_bounds()
    e1_grid_areas = iris.analysis.cartography.area_weights(e1)
    pre_industrial.coord('latitude').guess_bounds()
    pre_industrial.coord('longitude').guess_bounds()
    pre_grid_areas = iris.analysis.cartography.area_weights(pre_industrial)

    # Perform the area-weighted mean for each of the datasets using the
    # computed grid-box areas.
    pre_industrial_mean = pre_industrial.collapsed(['latitude', 'longitude'],
                                                   iris.analysis.MEAN,
                                                   weights=pre_grid_areas)
    e1_mean = e1.collapsed(['latitude', 'longitude'],
                           iris.analysis.MEAN,
                           weights=e1_grid_areas)
    a1b_mean = a1b.collapsed(['latitude', 'longitude'],
                             iris.analysis.MEAN,
                             weights=e1_grid_areas)

    # Plot the datasets
    qplt.plot(e1_mean, label='E1 scenario', lw=1.5, color='blue')
    qplt.plot(a1b_mean, label='A1B-Image scenario', lw=1.5, color='red')

    # Draw a horizontal line showing the pre-industrial mean
    plt.axhline(y=pre_industrial_mean.data, color='gray', linestyle='dashed',
                label='pre-industrial', lw=1.5)

    # Constrain the period 1860-1999 and extract the observed data from a1b
    constraint = iris.Constraint(
        time=lambda cell: 1860 <= cell.point.year <= 1999)
    observed = a1b_mean.extract(constraint)
    # Assert that this data set is the same as the e1 scenario:
    # they share data up to the 1999 cut off.
    assert np.all(np.isclose(observed.data,
                             e1_mean.extract(constraint).data))

    # Plot the observed data
    qplt.plot(observed, label='observed', color='black', lw=1.5)

    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title('North American mean air temperature', fontsize=18)

    plt.xlabel('Time / year')

    plt.grid()

    iplt.show()
Example No. 50
def make_plot(projection_name, projection_crs):

    # Create a matplotlib Figure.
    plt.figure()

    # Add a matplotlib Axes, specifying the required display projection.
    # NOTE: specifying 'projection' (a "cartopy.crs.Projection") makes the
    # resulting Axes a "cartopy.mpl.geoaxes.GeoAxes", which supports plotting
    # in different coordinate systems.
    ax = plt.axes(projection=projection_crs)

    # Set display limits to include a set region of latitude * longitude.
    # (Note: Cartopy-specific).
    ax.set_extent((-80.0, 20.0, 10.0, 80.0), crs=crs_latlon)

    # Add coastlines and meridians/parallels (Cartopy-specific).
    ax.coastlines(linewidth=0.75, color='navy')
    ax.gridlines(crs=crs_latlon, linestyle='-')

    # Plot the first dataset as a pseudocolour filled plot.
    maindata_filepath = iris.sample_data_path('rotated_pole.nc')
    main_data = iris.load_cube(maindata_filepath)
    # NOTE: iplt.pcolormesh calls "pyplot.pcolormesh", passing in a coordinate
    # system with the 'transform' keyword:  This enables the Axes (a cartopy
    # GeoAxes) to reproject the plot into the display projection.
    iplt.pcolormesh(main_data, cmap='RdBu_r')

    # Overplot the other dataset (which has a different grid), as contours.
    overlay_filepath = iris.sample_data_path('space_weather.nc')
    overlay_data = iris.load_cube(overlay_filepath, 'total electron content')
    # NOTE: as above, "iris.plot.contour" calls "pyplot.contour" with a
    # 'transform' keyword, enabling Cartopy reprojection.
    iplt.contour(overlay_data, 20,
                 linewidths=2.0, colors='darkgreen', linestyles='-')

    # Draw a margin line, some way in from the border of the 'main' data...
    # First calculate rectangle corners, 7% in from each corner of the data.
    x_coord, y_coord = main_data.coord(axis='x'), main_data.coord(axis='y')
    x_start, x_end = np.min(x_coord.points), np.max(x_coord.points)
    y_start, y_end = np.min(y_coord.points), np.max(y_coord.points)
    margin = 0.07
    margin_fractions = np.array([margin, 1.0 - margin])
    x_lower, x_upper = x_start + (x_end - x_start) * margin_fractions
    y_lower, y_upper = y_start + (y_end - y_start) * margin_fractions
    box_x_points = x_lower + (x_upper - x_lower) * np.array([0, 1, 1, 0, 0])
    box_y_points = y_lower + (y_upper - y_lower) * np.array([0, 0, 1, 1, 0])
    # Get the Iris coordinate system of the X coordinate (Y should be the same).
    cs_data1 = x_coord.coord_system
    # Construct an equivalent Cartopy coordinate reference system ("crs").
    crs_data1 = cs_data1.as_cartopy_crs()
    # Draw the rectangle in this crs, with matplotlib "pyplot.plot".
    # NOTE: the 'transform' keyword specifies a non-display coordinate system
    # for the plot points (as used by the "iris.plot" functions).
    plt.plot(box_x_points, box_y_points, transform=crs_data1,
             linewidth=2.0, color='white', linestyle='--')

    # Mark some particular places with a small circle and a name label...
    # Define some test points with latitude and longitude coordinates.
    city_data = [('London', 51.5072, -0.1275),
                 ('Halifax, NS', 44.67, -63.61),
                 ('Reykjavik', 64.1333, -21.9333)]
    # Place a single marker point and a text annotation at each place.
    for name, lat, lon in city_data:
        plt.plot(lon, lat, marker='o', markersize=7.0, markeredgewidth=2.5,
                 markerfacecolor='black', markeredgecolor='white',
                 transform=crs_latlon)
        # NOTE: the "plt.annotate call" does not have a "transform=" keyword,
        # so for this one we transform the coordinates with a Cartopy call.
        at_x, at_y = ax.projection.transform_point(lon, lat,
                                                   src_crs=crs_latlon)
        plt.annotate(
            name, xy=(at_x, at_y), xytext=(30, 20), textcoords='offset points',
            color='black', backgroundcolor='white', size='large',
            arrowprops=dict(arrowstyle='->', color='white', linewidth=2.5))

    # Add a title, and display.
    plt.title('A pseudocolour plot on the {} projection,\n'
              'with overlaid contours.'.format(projection_name))
    iplt.show()
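make_plot references a module-level crs_latlon that the excerpt omits; presumably it is a plain lat-lon coordinate reference system. A sketch of the missing module context, with the two display projections chosen purely as illustrations:

# Presumed module-level definition used by make_plot (not in the excerpt).
crs_latlon = ccrs.PlateCarree()

def main():
    # Any cartopy projection could be passed in; these two are examples.
    make_plot('Equidistant Cylindrical', ccrs.PlateCarree())
    make_plot('North Polar Stereographic', ccrs.NorthPolarStereo())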
Example No. 51
def main():
    # Load data into three Cubes, one for each set of PP files
    e1 = iris.load_cube(iris.sample_data_path('E1_north_america.nc'))

    a1b = iris.load_cube(iris.sample_data_path('A1B_north_america.nc'))

    # Load in the global pre-industrial mean temperature, and limit the
    # domain to the same North American region that e1 and a1b cover.
    north_america = iris.Constraint(longitude=lambda v: 225 <= v <= 315,
                                    latitude=lambda v: 15 <= v <= 60)
    pre_industrial = iris.load_cube(iris.sample_data_path('pre-industrial.pp'),
                                    north_america)

    # Generate area-weights array. As e1 and a1b are on the same grid we can
    # do this just once and re-use.
    # This method requires bounds on lat/lon coords, so first we must guess
    # these.
    e1.coord('latitude').guess_bounds()
    e1.coord('longitude').guess_bounds()
    e1_grid_areas = iris.analysis.cartography.area_weights(e1)
    pre_industrial.coord('latitude').guess_bounds()
    pre_industrial.coord('longitude').guess_bounds()
    pre_grid_areas = iris.analysis.cartography.area_weights(pre_industrial)

    # Now perform an area-weighted collapse for each dataset:
    pre_industrial_mean = pre_industrial.collapsed(['latitude', 'longitude'],
                                                   iris.analysis.MEAN,
                                                   weights=pre_grid_areas)
    e1_mean = e1.collapsed(['latitude', 'longitude'],
                           iris.analysis.MEAN,
                           weights=e1_grid_areas)
    a1b_mean = a1b.collapsed(['latitude', 'longitude'],
                             iris.analysis.MEAN,
                             weights=e1_grid_areas)

    # Show ticks 30 years apart
    plt.gca().xaxis.set_major_locator(mdates.YearLocator(30))

    # Format the interactive cursor's x readout as a year. (Note: this does
    # not change the tick labels themselves; see the sketch after this
    # example.)
    plt.gca().format_xdata = mdates.DateFormatter('%Y')

    # Plot the datasets
    qplt.plot(e1_mean, label='E1 scenario', lw=1.5, color='blue')
    qplt.plot(a1b_mean, label='A1B-Image scenario', lw=1.5, color='red')

    # Draw a horizontal line showing the pre-industrial mean
    plt.axhline(y=pre_industrial_mean.data, color='gray', linestyle='dashed',
                label='pre-industrial', lw=1.5)

    # Establish where a1b_mean and e1_mean have exactly the same data,
    # i.e. the observations shared by both scenarios
    common = np.where(a1b_mean.data == e1_mean.data)[0]
    observed = a1b_mean[common]

    # Plot the observed data
    qplt.plot(observed, label='observed', color='black', lw=1.5)

    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title('North American mean air temperature', fontsize=18)

    plt.xlabel('Time / year')

    plt.grid()

    iplt.show()
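As noted in the comment above, format_xdata only changes the figure's interactive cursor readout. To relabel the ticks themselves, one would instead set a major formatter, e.g. this sketch:

    # Format the x tick labels (not just the cursor readout) as years.
    plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y'))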