def testSetPlotFilledContourLargeRange(self):
    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
    plt.subplot(1, 2, 1)
    cc.setPlot(cube, "Filled Contour", "brewer_Blues_09", 15, True, 400, 200)
    plt.subplot(1, 2, 2)
    qplt.contourf(cube, 15, cmap="brewer_Blues_09", vmin=200, vmax=400)

    plt.show()
 def test_warn_deprecated(self):
     sample_path = _temp_file(self.sample_dir)
     with mock.patch('warnings.warn') as warn:
         sample_data_path(os.path.basename(sample_path))
         self.assertEqual(warn.call_count, 1)
         (warn_msg, warn_exception), _ = warn.call_args
         msg = 'iris.config.SAMPLE_DATA_DIR was deprecated'
         self.assertTrue(warn_msg.startswith(msg))
         self.assertEqual(warn_exception, IrisDeprecation)
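
The tests above call a _temp_file helper that is not included in this listing. A minimal sketch of what such a helper might look like, assuming it only needs to create an empty, uniquely named file inside the given directory and return its path:

import os
import tempfile


def _temp_file(directory, suffix='.nc'):
    # Hypothetical helper: create an empty, uniquely named file in
    # `directory` and return its full path.
    handle, path = tempfile.mkstemp(suffix=suffix, dir=directory)
    os.close(handle)
    return path
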
def main():
    # Load data into three Cubes, one for each set of PP files
    e1 = iris.load_strict(iris.sample_data_path('E1_north_america.nc'))
    
    a1b = iris.load_strict(iris.sample_data_path('A1B_north_america.nc'))
    
    # Load in the global pre-industrial mean temperature, and limit the
    # domain to the same North American region covered by e1 and a1b.
    north_america = iris.Constraint(
                                    longitude=lambda v: 225 <= v <= 315,
                                    latitude=lambda v: 15 <= v <= 60,
                                    )
    pre_industrial = iris.load_strict(iris.sample_data_path('pre-industrial.pp'),
                                  north_america
                                  )
    
    pre_industrial_mean = pre_industrial.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    e1_mean = e1.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    a1b_mean = a1b.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    
    # Show ticks 30 years apart
    plt.gca().xaxis.set_major_locator(mdates.YearLocator(30))
    
    # Label the ticks with year data
    plt.gca().format_xdata = mdates.DateFormatter('%Y')
    
    # Plot the datasets
    qplt.plot(e1_mean, coords=['time'], label='E1 scenario', lw=1.5, color='blue')
    qplt.plot(a1b_mean, coords=['time'], label='A1B-Image scenario', lw=1.5, color='red')
    
    # Draw a horizontal line showing the pre industrial mean
    plt.axhline(y=pre_industrial_mean.data, color='gray', linestyle='dashed', label='pre-industrial', lw=1.5)
    
    # Establish where a1b_mean and e1_mean have the same data,
    # i.e. the observations
    common = numpy.where(a1b_mean.data == e1_mean.data)[0]
    observed = a1b_mean[common]
    
    # Plot the observed data
    qplt.plot(observed, coords=['time'], label='observed', color='black', lw=1.5)
    
    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title('North American mean air temperature', fontsize=18)
    
    plt.xlabel('Time / year')
    
    plt.grid()

    iplt.show()
Example #4
def main():
    # load a single cube of surface temperature between +/- 5 latitude
    fname = iris.sample_data_path("ostia_monthly.nc")
    cube = iris.load_cube(
        fname,
        iris.Constraint("surface_temperature", latitude=lambda v: -5 < v < 5),
    )

    # Take the mean over latitude
    cube = cube.collapsed("latitude", iris.analysis.MEAN)

    # Now that we have our data in a nice way, let's create the plot
    # contour with 20 levels
    qplt.contourf(cube, 20)

    # Put a custom label on the y axis
    plt.ylabel("Time / years")

    # Stop matplotlib providing clever axes range padding
    plt.axis("tight")

    # As we are plotting annual variability, put years as the y ticks
    plt.gca().yaxis.set_major_locator(mdates.YearLocator())

    # And format the ticks to just show the year
    plt.gca().yaxis.set_major_formatter(mdates.DateFormatter("%Y"))

    iplt.show()
Example #5
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load some test data.
    fname = iris.sample_data_path('hybrid_height.nc')
    theta = iris.load_cube(fname, 'air_potential_temperature')

    # Extract a single height vs longitude cross-section. N.B. This could
    # easily be changed to extract a specific slice, or even to loop over *all*
    # cross section slices.
    cross_section = next(theta.slices(['grid_longitude',
                                       'model_level_number']))

    qplt.contourf(cross_section,
                  coords=['grid_longitude', 'altitude'],
                  cmap='RdBu_r')
    iplt.show()

    # Now do the equivalent plot, only against model level
    plt.figure()

    qplt.contourf(cross_section,
                  coords=['grid_longitude', 'model_level_number'],
                  cmap='RdBu_r')
    iplt.show()
def main():
    fname = iris.sample_data_path('/nfs/a266/data/CMIP5_AFRICA/BC_0.5x0.5/IPSL-CM5A-LR/historical/tasmax_WFDEI_1979-2013_0.5x0.5_day_IPSL-CM5A-LR_africa_historical_r1i1p1_full.nc')
    soi = iris.load_cube(fname)

    # Window length for filters.
    window = 121

    # Construct 2-year (24-month) and 7-year (84-month) low pass filters
    # for the SOI data which is monthly.
    wgts24 = low_pass_weights(window, 1. / 24.)
    wgts84 = low_pass_weights(window, 1. / 84.)
    soi24 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts24),
                               weights=wgts24)
    soi84 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts84),
                               weights=wgts84)

    # Plot the SOI time series and both filtered versions.
    plt.figure(figsize=(9, 4))
    iplt.plot(soi, color='0.7', linewidth=1., linestyle='-', alpha=1.,
              label='no filter')
    iplt.plot(soi24, color='b', linewidth=2., linestyle='-', alpha=.7,
              label='2-year filter')
    iplt.plot(soi84, color='r', linewidth=2., linestyle='-', alpha=.7,
              label='7-year filter')
    plt.ylim([-4, 4])
    plt.title('West Africa')
    plt.xlabel('Time')
    plt.ylabel('SOI')
    plt.legend(fontsize=10)
    iplt.show()
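
The filtering examples in this listing call a low_pass_weights helper that is defined elsewhere in the original scripts. A sketch of a Lanczos windowed-sinc version, along the lines of the helper used in the Iris SOI filtering example (treat the exact form as an assumption):

import numpy as np


def low_pass_weights(window, cutoff):
    # Calculate weights for a low-pass Lanczos filter.
    # window: length of the filter window; cutoff: the cutoff frequency
    # in inverse time steps.
    order = ((window - 1) // 2) + 1
    nwts = 2 * order + 1
    w = np.zeros([nwts])
    n = nwts // 2
    w[n] = 2 * cutoff
    k = np.arange(1., n)
    sigma = np.sin(np.pi * k / n) * n / (np.pi * k)
    firstfactor = np.sin(2. * np.pi * cutoff * k) / (np.pi * k)
    w[n - 1:0:-1] = firstfactor * sigma
    w[n + 1:-1] = firstfactor * sigma
    return w[1:-1]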
Example #7
def main():
    # Load some test data.
    fname = iris.sample_data_path("hybrid_height.nc")
    theta = iris.load_cube(fname, "air_potential_temperature")

    # Extract a single height vs longitude cross-section. N.B. This could
    # easily be changed to extract a specific slice, or even to loop over *all*
    # cross section slices.
    cross_section = next(theta.slices(["grid_longitude",
                                       "model_level_number"]))

    qplt.contourf(cross_section,
                  coords=["grid_longitude", "altitude"],
                  cmap="RdBu_r")
    iplt.show()

    # Now do the equivalent plot, only against model level
    plt.figure()

    qplt.contourf(
        cross_section,
        coords=["grid_longitude", "model_level_number"],
        cmap="RdBu_r",
    )
    iplt.show()
def main():
    fname = iris.sample_data_path('NAME_output.txt')

    boundary_volc_ash_constraint = iris.Constraint(
        'VOLCANIC_ASH_AIR_CONCENTRATION', flight_level='From FL000 - FL200')

    # Callback shown as None to illustrate where a cube-level callback function would be used if required
    cube = iris.load_strict(fname, boundary_volc_ash_constraint, callback=None)

    map = iplt.map_setup(lon_range=[-70, 20],
                         lat_range=[20, 75],
                         resolution='i')

    map.drawcoastlines()

    iplt.contourf(cube,
                  levels=(0.0002, 0.002, 0.004, 1),
                  colors=('#80ffff', '#939598', '#e00404'),
                  extend='max')

    time = cube.coord('time')
    time_date = time.units.num2date(time.points[0]).strftime(UTC_format)
    plt.title('Volcanic ash concentration forecast\nvalid at %s' % time_date)

    plt.show()
Example #9
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # load a single cube of surface temperature between +/- 5 latitude
    fname = iris.sample_data_path('ostia_monthly.nc')
    cube = iris.load_cube(
        fname,
        iris.Constraint('surface_temperature', latitude=lambda v: -5 < v < 5))

    # Take the mean over latitude
    cube = cube.collapsed('latitude', iris.analysis.MEAN)

    # Now that we have our data in a nice way, let's create the plot
    # contour with 20 levels
    qplt.contourf(cube, 20)

    # Put a custom label on the y axis
    plt.ylabel('Time / years')

    # Stop matplotlib providing clever axes range padding
    plt.axis('tight')

    # As we are plotting annual variability, put years as the y ticks
    plt.gca().yaxis.set_major_locator(mdates.YearLocator())

    # And format the ticks to just show the year
    plt.gca().yaxis.set_major_formatter(mdates.DateFormatter('%Y'))

    iplt.show()
Example #10
def main():
    fname = iris.sample_data_path('ostia_monthly.nc')
    
    # load a single cube of surface temperature between +/- 5 latitude
    cube = iris.load_cube(fname, iris.Constraint('surface_temperature', latitude=lambda v: -5 < v < 5))
    
    # Take the mean over latitude
    cube = cube.collapsed('latitude', iris.analysis.MEAN)
    
    # Now that we have our data in a nice way, let's create the plot
    # contour with 20 levels
    qplt.contourf(cube, 20)
    
    # Put a custom label on the y axis 
    plt.ylabel('Time / years')
    
    # Stop matplotlib providing clever axes range padding
    plt.axis('tight')
    
    # As we are plotting annual variability, put years as the y ticks
    plt.gca().yaxis.set_major_locator(mdates.YearLocator())
    
    # And format the ticks to just show the year
    plt.gca().yaxis.set_major_formatter(mdates.DateFormatter('%Y'))
    
    plt.show()
def testSetPlotpcolormeshLargeRange(self):
    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
    plt.subplot(1, 2, 1)
    cc.setPlot(cube, "pcolormesh", "brewer_Blues_09", 15, True, 400, 200)
    plt.subplot(1, 2, 2)
    qplt.pcolormesh(cube, cmap="brewer_Blues_09", vmin=200, vmax=400)
    plt.show()

def testSetPlotpcolormeshPlain(self):
    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
    plt.subplot(1, 2, 1)
    cc.setPlot(cube, "pcolormesh", "Automatic", 15, False, None, None)
    plt.subplot(1, 2, 2)
    qplt.pcolormesh(cube)
    plt.show()

def testSetPlotContourPlain(self):
    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
    plt.subplot(1, 2, 1)
    cc.setPlot(cube, "Contour", "Automatic", 15, False, None, None)
    plt.subplot(1, 2, 2)
    qplt.contour(cube, 15)
    plt.show()
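
The cc.setPlot calls in these tests come from a plotting wrapper that is not part of this listing. A hypothetical sketch consistent with how the tests exercise it (the argument order cube, plot type, colormap, contour count, fixed-range flag, maximum, minimum is inferred from the calls above):

import iris.quickplot as qplt


def setPlot(cube, plot_type, colormap, num_contours,
            fix_range, max_val, min_val):
    # Hypothetical wrapper: build the keyword arguments the quickplot
    # routines expect, then dispatch on the requested plot type.
    kwargs = {}
    if colormap != "Automatic":
        kwargs['cmap'] = colormap
    if fix_range:
        kwargs['vmin'] = min_val
        kwargs['vmax'] = max_val
    if plot_type == "Filled Contour":
        qplt.contourf(cube, num_contours, **kwargs)
    elif plot_type == "Contour":
        qplt.contour(cube, num_contours, **kwargs)
    elif plot_type == "pcolormesh":
        qplt.pcolormesh(cube, **kwargs)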
Example #14
def runme():

    # Load a cube into Iris
    filename = iris.sample_data_path("A1B.2098.pp")
    cube = iris.load_cube(filename)
    cube.coord(axis="x").guess_bounds()
    cube.coord(axis="y").guess_bounds()

    # Plot the cube with Iris, just to see it.
    qplt.contourf(cube)
    qplt.plt.gca().coastlines()
    qplt.show()

    # Export as GeoTIFF (shouldn't have to write to a physical file)
    iris.experimental.raster.export_geotiff(cube, 'temp.geotiff')
    data = open('temp.geotiff', "rb").read()

    # Publish to geoserver
    server = "localhost:8082"
    username, password = '******', 'geoserver'
    connect_to_server(server, username, password)

    workspace = "iris_test_ws"
    if not exists_workspace(server, workspace):
        create_workspace(server, workspace)

    coveragestore = "iris_test_cs"
    if not exists_coveragestore(server, workspace, coveragestore):
        create_coveragestore(server, workspace, coveragestore)

    filename = "file.geotiff"
    upload_file(server, workspace, coveragestore, filename, data)

    # Tell geoserver it's global EPSG:4326. Shouldn't need this eventually.
    coverage = coveragestore  # (they get the same name from geoserver)
    data = '<coverage>'\
                '<srs>EPSG:4326</srs>'\
                '<nativeCRS>EPSG:4326</nativeCRS>'\
                ' <nativeBoundingBox>'\
                    '<minx>-180.0</minx>'\
                    '<maxx>180.0</maxx>'\
                    '<miny>-90.0</miny>'\
                    '<maxy>90.0</maxy>'\
                    '<crs>EPSG:4326</crs>'\
                '</nativeBoundingBox>'\
                '<enabled>true</enabled>'\
            '</coverage>'
    update_coverage(server, workspace, coveragestore, coverage, data)

    # Use the new WMS service as a background image!
    wms_server = '{server}/{workspace}/wms?service=WMS'.format(
        server=server, workspace=workspace)
    layers = '{workspace}:{coveragestore}'.format(workspace=workspace,
                                                  coveragestore=coveragestore)

    plt.axes(projection=ccrs.PlateCarree())
    plt.gca().set_extent([-40, 40, 20, 80])
    wms_image(wms_server, layers)
    plt.gca().coastlines()
    plt.show()
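
connect_to_server, exists_workspace, create_workspace and the other GeoServer helpers used above are not shown in this listing. A rough sketch of how the workspace helpers could be written against the GeoServer REST API using requests (the URL layout and basic-auth handling are assumptions, not the original implementation):

import requests

_session = requests.Session()


def connect_to_server(server, username, password):
    # Remember credentials for the later REST calls (assumed basic auth).
    _session.auth = (username, password)


def exists_workspace(server, workspace):
    url = 'http://{}/geoserver/rest/workspaces/{}.json'.format(server,
                                                               workspace)
    return _session.get(url).status_code == 200


def create_workspace(server, workspace):
    url = 'http://{}/geoserver/rest/workspaces'.format(server)
    body = '<workspace><name>{}</name></workspace>'.format(workspace)
    _session.post(url, data=body, headers={'Content-Type': 'text/xml'})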
Example #15
 def test_glob_ok(self):
     sample_path = _temp_file(self.sample_dir)
     sample_glob = '?' + os.path.basename(sample_path)[1:]
     with mock.patch('iris_sample_data.path', self.sample_dir):
         result = sample_data_path(sample_glob)
         self.assertEqual(result, os.path.join(self.sample_dir,
                                               sample_glob))
Example #16
def main():
    fname = iris.sample_data_path("NAME_output.txt")

    boundary_volc_ash_constraint = iris.Constraint("VOLCANIC_ASH_AIR_CONCENTRATION", flight_level="From FL000 - FL200")

    # Callback shown as None to illustrate where a cube-level callback function would be used if required
    cube = iris.load_cube(fname, boundary_volc_ash_constraint, callback=None)

    # draw contour levels for the data (the top level is just a catch-all)
    levels = (0.0002, 0.002, 0.004, 1e10)
    cs = iplt.contourf(cube, levels=levels, colors=("#80ffff", "#939598", "#e00404"))

    # draw a black outline at the lowest contour to highlight affected areas
    iplt.contour(cube, levels=(levels[0], 100), colors="black")

    # set an extent and a background image for the map
    ax = plt.gca()
    ax.set_extent((-90, 20, 20, 75))
    ax.stock_img("ne_shaded")

    # make a legend, with custom labels, for the coloured contour set
    artists, _ = cs.legend_elements()
    labels = [
        r"$%s < x \leq %s$" % (levels[0], levels[1]),
        r"$%s < x \leq %s$" % (levels[1], levels[2]),
        r"$x > %s$" % levels[2],
    ]
    ax.legend(artists, labels, title="Ash concentration / g m-3", loc="upper left")

    time = cube.coord("time")
    time_date = time.units.num2date(time.points[0]).strftime(UTC_format)
    plt.title("Volcanic ash concentration forecast\nvalid at %s" % time_date)

    iplt.show()
def main():
    # Load the whole time-sequence as a single cube.
    file_path = iris.sample_data_path("E1_north_america.nc")
    cube = iris.load_cube(file_path)

    # Make an aggregator from the user function.
    SPELL_COUNT = Aggregator(
        "spell_count", count_spells, units_func=lambda units: 1
    )

    # Define the parameters of the test.
    threshold_temperature = 280.0
    spell_years = 5

    # Calculate the statistic.
    warm_periods = cube.collapsed(
        "time",
        SPELL_COUNT,
        threshold=threshold_temperature,
        spell_length=spell_years,
    )
    warm_periods.rename("Number of 5-year warm spells in 240 years")

    # Plot the results.
    qplt.contourf(warm_periods, cmap="RdYlBu_r")
    plt.gca().coastlines()
    iplt.show()
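
count_spells, the user function wrapped by the Aggregator above, is defined in the original gallery script rather than here. A sketch of how such a function can be written with iris.util.rolling_window (close to the Iris custom-aggregation example, but treat the details as an assumption):

import numpy as np
from iris.util import rolling_window


def count_spells(data, threshold, axis, spell_length):
    # Count the points along `axis` that start a run of at least
    # `spell_length` consecutive values exceeding `threshold`.
    if axis < 0:
        axis += data.ndim
    data_hits = data > threshold
    hit_windows = rolling_window(data_hits, window=spell_length, axis=axis)
    full_windows = np.all(hit_windows, axis=axis + 1)
    return np.sum(full_windows, axis=axis, dtype=int)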
Example #18
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load some test data.
    fname = iris.sample_data_path('A1B_north_america.nc')
    cube = iris.load_cube(fname)

    # Extract a single time series at a latitude and longitude point.
    location = next(cube.slices(['time']))

    # Calculate a polynomial fit to the data at this time series.
    x_points = location.coord('time').points
    y_points = location.data
    degree = 2

    p = np.polyfit(x_points, y_points, degree)
    y_fitted = np.polyval(p, x_points)

    # Add the polynomial fit values to the time series to take
    # full advantage of Iris plotting functionality.
    long_name = 'degree_{}_polynomial_fit_of_{}'.format(degree, cube.name())
    fit = iris.coords.AuxCoord(y_fitted, long_name=long_name,
                               units=location.units)
    location.add_aux_coord(fit, 0)

    qplt.plot(location.coord('time'), location, label='data')
    qplt.plot(location.coord('time'),
              location.coord(long_name),
              'g-', label='polynomial fit')
    plt.legend(loc='best')
    plt.title('Trend of US air temperature over time')

    qplt.show()
Example #19
def main():
    # Load data
    filepath = iris.sample_data_path('orca2_votemper.nc')
    cube = iris.load_cube(filepath)

    # Choose plot projections
    projections = {}
    projections['Mollweide'] = ccrs.Mollweide()
    projections['PlateCarree'] = ccrs.PlateCarree()
    projections['NorthPolarStereo'] = ccrs.NorthPolarStereo()
    projections['Orthographic'] = ccrs.Orthographic(central_longitude=-90,
                                                    central_latitude=45)

    pcarree = projections['PlateCarree']
    # Transform cube to target projection
    new_cube, extent = iris.analysis.cartography.project(cube, pcarree,
                                                         nx=400, ny=200)

    # Plot data in each projection
    for name in sorted(projections):
        fig = plt.figure()
        fig.suptitle('ORCA2 Data Projected to {}'.format(name))
        # Set up axes and title
        ax = plt.subplot(projection=projections[name])
        # Set limits
        ax.set_global()
        # plot with Iris quickplot pcolormesh
        qplt.pcolormesh(new_cube)
        # Draw coastlines
        ax.coastlines()

        plt.show()
Example #20
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load the whole time-sequence as a single cube.
    file_path = iris.sample_data_path('E1_north_america.nc')
    cube = iris.load_cube(file_path)

    # Make an aggregator from the user function.
    SPELL_COUNT = Aggregator('spell_count',
                             count_spells,
                             units_func=lambda units: 1)

    # Define the parameters of the test.
    threshold_temperature = 280.0
    spell_years = 5

    # Calculate the statistic.
    warm_periods = cube.collapsed('time', SPELL_COUNT,
                                  threshold=threshold_temperature,
                                  spell_length=spell_years)
    warm_periods.rename('Number of 5-year warm spells in 240 years')

    # Plot the results.
    qplt.contourf(warm_periods, cmap='RdYlBu_r')
    plt.gca().coastlines()
    iplt.show()
def main():
    fname = iris.sample_data_path('air_temp.pp')

    # Load exactly one cube from the given file.
    temperature = iris.load_cube(fname)

    # We only want a small number of latitudes, so filter some out
    # using "extract".
    temperature = temperature.extract(
        iris.Constraint(latitude=lambda cell: 68 <= cell < 78))

    for cube in temperature.slices('longitude'):  

        # Create a string label to identify this cube (i.e. latitude: value).
        cube_label = 'latitude: %s' % cube.coord('latitude').points[0]

        # Plot the cube, and associate it with a label.
        qplt.plot(cube, label=cube_label)

    # Add the legend with 2 columns.
    plt.legend(ncol=2)

    # Put a grid on the plot.
    plt.grid(True)

    # Tell matplotlib not to extend the plot axes range to nicely
    # rounded numbers.
    plt.axis('tight')

    # Finally, show it.
    plt.show()
def testPlot1DwithoutGridlines(self):
    cube = iris.load_cube(iris.sample_data_path('SOI_Darwin.nc'))
    plt.subplot(1, 2, 1)
    cc.plot1D(cube, False)
    plt.subplot(1, 2, 2)
    qplt.plot(cube)
    plt.show()
Example #23
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load some test data.
    fname = iris.sample_data_path('hybrid_height.nc')
    theta = iris.load_cube(fname, 'air_potential_temperature')

    # Extract a single height vs longitude cross-section. N.B. This could
    # easily be changed to extract a specific slice, or even to loop over *all*
    # cross section slices.
    cross_section = next(theta.slices(['grid_longitude',
                                       'model_level_number']))

    qplt.contourf(cross_section, coords=['grid_longitude', 'altitude'],
                  cmap='RdBu_r')
    iplt.show()

    # Now do the equivalent plot, only against model level
    plt.figure()

    qplt.contourf(cross_section,
                  coords=['grid_longitude', 'model_level_number'],
                  cmap='RdBu_r')
    iplt.show()
Example #24
def main():
    fname = iris.sample_data_path('air_temp.pp')
    temperature = iris.load_cube(fname)

    qplt.contourf(temperature, 15)
    plt.gca().coastlines()
    plt.show()
Example #25
 def __init__(self, initial_value=''):
     if initial_value == '':
         try:
             initial_value = iris.sample_data_path('')
         except ValueError:
             initial_value = ''
     # Define the file system path for input files.
     self._path = ipywidgets.Text(description='Path:',
                                  value=initial_value,
                                  width="100%")
     # Observe the path.
     self._path.observe(self._handle_path, names='value')
     # Use default path value to initialise file options.
     options = []
     if os.path.exists(self._path.value):
         options = glob.glob('{}/*'.format(self._path.value))
         options.sort()
     # Defines the files selected to be loaded.
     self._files = ipywidgets.SelectMultiple(description='Files:',
                                             options=OrderedDict([
                                                 (os.path.basename(f), f)
                                                 for f in options
                                             ]),
                                             width="100%")
     self._box = ipywidgets.Box(children=[self._path, self._files],
                                width="100%")
Example #26
def main():
    # Load some test data.
    fname = iris.sample_data_path("A1B_north_america.nc")
    cube = iris.load_cube(fname)

    # Extract a single time series at a latitude and longitude point.
    location = next(cube.slices(["time"]))

    # Calculate a polynomial fit to the data at this time series.
    x_points = location.coord("time").points
    y_points = location.data
    degree = 2

    p = np.polyfit(x_points, y_points, degree)
    y_fitted = np.polyval(p, x_points)

    # Add the polynomial fit values to the time series to take
    # full advantage of Iris plotting functionality.
    long_name = "degree_{}_polynomial_fit_of_{}".format(degree, cube.name())
    fit = iris.coords.AuxCoord(y_fitted,
                               long_name=long_name,
                               units=location.units)
    location.add_aux_coord(fit, 0)

    qplt.plot(location.coord("time"), location, label="data")
    qplt.plot(
        location.coord("time"),
        location.coord(long_name),
        "g-",
        label="polynomial fit",
    )
    plt.legend(loc="best")
    plt.title("Trend of US air temperature over time")

    qplt.show()
Example #27
 def __init__(self, initial_value=''):
     if initial_value == '':
         try:
             initial_value = iris.sample_data_path('')
         except ValueError:
             initial_value = ''
     # Define the file system path for input files.
     self._path = ipywidgets.Text(
         description='Path:',
         value=initial_value,
         width="100%")
     # Observe the path.
     self._path.observe(self._handle_path, names='value')
     # Use default path value to initialise file options.
     options = []
     if os.path.exists(self._path.value):
         options = glob.glob('{}/*'.format(self._path.value))
         options.sort()
     # Defines the files selected to be loaded.
     self._files = ipywidgets.SelectMultiple(
         description='Files:',
         options=OrderedDict([(os.path.basename(f), f)
                              for f in options]),
         width="100%"
     )
     self._box = ipywidgets.Box(children=[self._path, self._files],
                                width="100%")
def main():
    fname = iris.sample_data_path('air_temp.pp')
    temperature = iris.load_strict(fname)
    
    qplt.contourf(temperature, 15)
    iplt.gcm().drawcoastlines()
    plt.show()
Example #29
def main():
    # Load data
    filepath = iris.sample_data_path("orca2_votemper.nc")
    cube = iris.load_cube(filepath)

    # Choose plot projections
    projections = {}
    projections["Mollweide"] = ccrs.Mollweide()
    projections["PlateCarree"] = ccrs.PlateCarree()
    projections["NorthPolarStereo"] = ccrs.NorthPolarStereo()
    projections["Orthographic"] = ccrs.Orthographic(central_longitude=-90,
                                                    central_latitude=45)

    pcarree = projections["PlateCarree"]
    # Transform cube to target projection
    new_cube, extent = iris.analysis.cartography.project(cube,
                                                         pcarree,
                                                         nx=400,
                                                         ny=200)

    # Plot data in each projection
    for name in sorted(projections):
        fig = plt.figure()
        fig.suptitle("ORCA2 Data Projected to {}".format(name))
        # Set up axes and title
        ax = plt.subplot(projection=projections[name])
        # Set limits
        ax.set_global()
        # plot with Iris quickplot pcolormesh
        qplt.pcolormesh(new_cube)
        # Draw coastlines
        ax.coastlines()

        iplt.show()
Example #30
def main():
    # Load the u and v components of wind from a pp file
    infile = iris.sample_data_path("wind_speed_lake_victoria.pp")

    uwind = iris.load_cube(infile, "x_wind")
    vwind = iris.load_cube(infile, "y_wind")

    ulon = uwind.coord("longitude")
    vlon = vwind.coord("longitude")

    # The longitude points go from 180 to 540, so subtract 360 from them
    ulon.points = ulon.points - 360.0
    vlon.points = vlon.points - 360.0

    # Create a cube containing the wind speed
    windspeed = (uwind**2 + vwind**2)**0.5
    windspeed.rename("windspeed")

    x = ulon.points
    y = uwind.coord("latitude").points
    u = uwind.data
    v = vwind.data

    # Set up axes to show the lake
    lakes = cfeat.NaturalEarthFeature("physical",
                                      "lakes",
                                      "50m",
                                      facecolor="none")

    plt.figure()
    ax = plt.axes(projection=ccrs.PlateCarree())
    ax.add_feature(lakes)

    # Get the coordinate reference system used by the data
    transform = ulon.coord_system.as_cartopy_projection()

    # Plot the wind speed as a contour plot
    qplt.contourf(windspeed, 20)

    # Add arrows to show the wind vectors
    plt.quiver(x, y, u, v, pivot="middle", transform=transform)

    plt.title("Wind speed over Lake Victoria")
    qplt.show()

    # Normalise the data for uniform arrow size
    u_norm = u / np.sqrt(u**2.0 + v**2.0)
    v_norm = v / np.sqrt(u**2.0 + v**2.0)

    plt.figure()
    ax = plt.axes(projection=ccrs.PlateCarree())
    ax.add_feature(lakes)

    qplt.contourf(windspeed, 20)

    plt.quiver(x, y, u_norm, v_norm, pivot="middle", transform=transform)

    plt.title("Wind speed over Lake Victoria")
    qplt.show()
Example #31
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load the gridded temperature and salinity data.
    fname = iris.sample_data_path('atlantic_profiles.nc')
    cubes = iris.load(fname)
    theta, = cubes.extract('sea_water_potential_temperature')
    salinity, = cubes.extract('sea_water_practical_salinity')

    # Extract profiles of temperature and salinity from a particular point in
    # the southern portion of the domain, and limit the depth of the profile
    # to 1000m.
    lon_cons = iris.Constraint(longitude=330.5)
    lat_cons = iris.Constraint(latitude=lambda l: -10 < l < -9)
    depth_cons = iris.Constraint(depth=lambda d: d <= 1000)
    theta_1000m = theta.extract(depth_cons & lon_cons & lat_cons)
    salinity_1000m = salinity.extract(depth_cons & lon_cons & lat_cons)

    # Plot these profiles on the same set of axes. In each case we call plot
    # with two arguments, the cube followed by the depth coordinate. Putting
    # them in this order places the depth coordinate on the y-axis.
    # The first plot is in the default axes. We'll use the same color for the
    # curve and its axes/tick labels.
    plt.figure(figsize=(5, 6))
    temperature_color = (.3, .4, .5)
    ax1 = plt.gca()
    iplt.plot(theta_1000m, theta_1000m.coord('depth'), linewidth=2,
              color=temperature_color, alpha=.75)
    ax1.set_xlabel('Potential Temperature / K', color=temperature_color)
    ax1.set_ylabel('Depth / m')
    for ticklabel in ax1.get_xticklabels():
        ticklabel.set_color(temperature_color)
    # To plot salinity in the same axes we use twiny(). We'll use a different
    # color to identify salinity.
    salinity_color = (.6, .1, .15)
    ax2 = plt.gca().twiny()
    iplt.plot(salinity_1000m, salinity_1000m.coord('depth'), linewidth=2,
              color=salinity_color, alpha=.75)
    ax2.set_xlabel('Salinity / PSU', color=salinity_color)
    for ticklabel in ax2.get_xticklabels():
        ticklabel.set_color(salinity_color)
    plt.tight_layout()
    iplt.show()

    # Now plot a T-S diagram using scatter. We'll use all the profiles here,
    # and each point will be coloured according to its depth.
    plt.figure(figsize=(6, 6))
    depth_values = theta.coord('depth').points
    for s, t in iris.iterate.izip(salinity, theta, coords='depth'):
        iplt.scatter(s, t, c=depth_values, marker='+', cmap='RdYlBu_r')
    ax = plt.gca()
    ax.set_xlabel('Salinity / PSU')
    ax.set_ylabel('Potential Temperature / K')
    cb = plt.colorbar(orientation='horizontal')
    cb.set_label('Depth / m')
    plt.tight_layout()
    iplt.show()
def testSetPlotContourLargeRange(self):
    cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
    plt.subplot(1, 2, 1)
    cc.setPlot(cube, "Contour", "brewer_Blues_09", 15, True, 400, 200)
    plt.subplot(1, 2, 2)
    contours = qplt.contour(cube, 15, cmap="brewer_Blues_09", vmin=200, vmax=400)
    plt.clabel(contours, inline=1, fontsize=8)
    plt.show()
def main():
    file_path = iris.sample_data_path('polar_stereo.grib2')
    cube = iris.load_cube(file_path)
    qplt.contourf(cube)
    ax = plt.gca()
    ax.coastlines()
    ax.gridlines()
    plt.show()
Example #34
def main():
    file_path = iris.sample_data_path("toa_brightness_stereographic.nc")
    cube = iris.load_cube(file_path)
    qplt.contourf(cube)
    ax = plt.gca()
    ax.coastlines()
    ax.gridlines()
    iplt.show()
Example #35
def runme():

    # Load a cube into Iris
    filename = iris.sample_data_path("A1B.2098.pp")
    cube = iris.load_cube(filename)
    cube.coord(axis="x").guess_bounds()
    cube.coord(axis="y").guess_bounds()

    # Plot the cube with Iris, just to see it.
    qplt.contourf(cube)
    qplt.plt.gca().coastlines()
    qplt.show()
    
    # Export as GeoTIFF (shouldn't have to write to a physical file)
    iris.experimental.raster.export_geotiff(cube, 'temp.geotiff')
    data = open('temp.geotiff', "rb").read()

    # Publish to geoserver
    server = "localhost:8082"
    username, password = '******', 'geoserver'
    connect_to_server(server, username, password)

    workspace = "iris_test_ws"
    if not exists_workspace(server, workspace):
        create_workspace(server, workspace)
    
    coveragestore = "iris_test_cs"
    if not exists_coveragestore(server, workspace, coveragestore):
        create_coveragestore(server, workspace, coveragestore)

    filename = "file.geotiff"
    upload_file(server, workspace, coveragestore, filename, data)
    
    # Tell geoserver it's global EPSG:4326. Shouldn't need this eventually.
    coverage = coveragestore  # (they get the same name from geoserver)
    data = '<coverage>'\
                '<srs>EPSG:4326</srs>'\
                '<nativeCRS>EPSG:4326</nativeCRS>'\
                ' <nativeBoundingBox>'\
                    '<minx>-180.0</minx>'\
                    '<maxx>180.0</maxx>'\
                    '<miny>-90.0</miny>'\
                    '<maxy>90.0</maxy>'\
                    '<crs>EPSG:4326</crs>'\
                '</nativeBoundingBox>'\
                '<enabled>true</enabled>'\
            '</coverage>'
    update_coverage(server, workspace, coveragestore, coverage, data)

    # Use the new WMS service as a background image!
    wms_server = '{server}/{workspace}/wms?service=WMS'.format(server=server, workspace=workspace)
    layers = '{workspace}:{coveragestore}'.format(workspace=workspace, coveragestore=coveragestore)
    
    plt.axes(projection=ccrs.PlateCarree())
    plt.gca().set_extent([-40, 40, 20, 80])
    wms_image(wms_server, layers)
    plt.gca().coastlines()
    plt.show()
 def test_call(self):
     try:
         sample_file = _temp_file(self.sample_dir)
         with mock.patch('iris_sample_data.path', self.sample_dir):
             import iris_sample_data
             result = sample_data_path(os.path.basename(sample_file))
             self.assertEqual(result, sample_file)
     except ImportError:
         pass
Example #37
def main():
    # Load the u and v components of wind from a pp file
    infile = iris.sample_data_path('wind_speed_lake_victoria.pp')

    uwind = iris.load_cube(infile, 'x_wind')
    vwind = iris.load_cube(infile, 'y_wind')

    ulon = uwind.coord('longitude')
    vlon = vwind.coord('longitude')

    # The longitude points go from 180 to 540, so subtract 360 from them
    ulon.points = ulon.points - 360.0
    vlon.points = vlon.points - 360.0

    # Create a cube containing the wind speed
    windspeed = (uwind ** 2 + vwind ** 2) ** 0.5
    windspeed.rename('windspeed')

    x = ulon.points
    y = uwind.coord('latitude').points
    u = uwind.data
    v = vwind.data

    # Set up axes to show the lake
    lakes = cfeat.NaturalEarthFeature('physical', 'lakes', '50m',
                                      facecolor='none')

    plt.figure()
    ax = plt.axes(projection=ccrs.PlateCarree())
    ax.add_feature(lakes)

    # Get the coordinate reference system used by the data
    transform = ulon.coord_system.as_cartopy_projection()

    # Plot the wind speed as a contour plot
    qplt.contourf(windspeed, 20)

    # Add arrows to show the wind vectors
    plt.quiver(x, y, u, v, pivot='middle', transform=transform)

    plt.title("Wind speed over Lake Victoria")
    qplt.show()

    # Normalise the data for uniform arrow size
    u_norm = u / np.sqrt(u ** 2.0 + v ** 2.0)
    v_norm = v / np.sqrt(u ** 2.0 + v ** 2.0)

    plt.figure()
    ax = plt.axes(projection=ccrs.PlateCarree())
    ax.add_feature(lakes)

    qplt.contourf(windspeed, 20)

    plt.quiver(x, y, u_norm, v_norm, pivot='middle', transform=transform)

    plt.title("Wind speed over Lake Victoria")
    qplt.show()
Example #38
def main():
    # Load the monthly-valued Southern Oscillation Index (SOI) time-series.
    fname = iris.sample_data_path("SOI_Darwin.nc")
    soi = iris.load_cube(fname)

    # Window length for filters.
    window = 121

    # Construct 2-year (24-month) and 7-year (84-month) low pass filters
    # for the SOI data which is monthly.
    wgts24 = low_pass_weights(window, 1.0 / 24.0)
    wgts84 = low_pass_weights(window, 1.0 / 84.0)

    # Apply each filter using the rolling_window method used with the weights
    # keyword argument. A weighted sum is required because the magnitude of
    # the weights are just as important as their relative sizes.
    soi24 = soi.rolling_window("time",
                               iris.analysis.SUM,
                               len(wgts24),
                               weights=wgts24)
    soi84 = soi.rolling_window("time",
                               iris.analysis.SUM,
                               len(wgts84),
                               weights=wgts84)

    # Plot the SOI time series and both filtered versions.
    plt.figure(figsize=(9, 4))
    iplt.plot(
        soi,
        color="0.7",
        linewidth=1.0,
        linestyle="-",
        alpha=1.0,
        label="no filter",
    )
    iplt.plot(
        soi24,
        color="b",
        linewidth=2.0,
        linestyle="-",
        alpha=0.7,
        label="2-year filter",
    )
    iplt.plot(
        soi84,
        color="r",
        linewidth=2.0,
        linestyle="-",
        alpha=0.7,
        label="7-year filter",
    )
    plt.ylim([-4, 4])
    plt.title("Southern Oscillation Index (Darwin Only)")
    plt.xlabel("Time")
    plt.ylabel("SOI")
    plt.legend(fontsize=10)
    iplt.show()
Example #39
def main():
    # Load data into three Cubes, one for each set of PP files
    e1 = iris.load_cube(iris.sample_data_path("E1_north_america.nc"))

    a1b = iris.load_cube(iris.sample_data_path("A1B_north_america.nc"))

    # Load in the global pre-industrial mean temperature, and limit the
    # domain to the same North American region covered by e1 and a1b.
    north_america = iris.Constraint(longitude=lambda v: 225 <= v <= 315, latitude=lambda v: 15 <= v <= 60)
    pre_industrial = iris.load_cube(iris.sample_data_path("pre-industrial.pp"), north_america)

    pre_industrial_mean = pre_industrial.collapsed(["latitude", "longitude"], iris.analysis.MEAN)
    e1_mean = e1.collapsed(["latitude", "longitude"], iris.analysis.MEAN)
    a1b_mean = a1b.collapsed(["latitude", "longitude"], iris.analysis.MEAN)

    # Show ticks 30 years apart
    plt.gca().xaxis.set_major_locator(mdates.YearLocator(30))

    # Label the ticks with year data
    plt.gca().format_xdata = mdates.DateFormatter("%Y")

    # Plot the datasets
    qplt.plot(e1_mean, coords=["time"], label="E1 scenario", lw=1.5, color="blue")
    qplt.plot(a1b_mean, coords=["time"], label="A1B-Image scenario", lw=1.5, color="red")

    # Draw a horizontal line showing the pre industrial mean
    plt.axhline(y=pre_industrial_mean.data, color="gray", linestyle="dashed", label="pre-industrial", lw=1.5)

    # Establish where a1b_mean and e1_mean have the same data,
    # i.e. the observations
    common = np.where(a1b_mean.data == e1_mean.data)[0]
    observed = a1b_mean[common]

    # Plot the observed data
    qplt.plot(observed, coords=["time"], label="observed", color="black", lw=1.5)

    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title("North American mean air temperature", fontsize=18)

    plt.xlabel("Time / year")

    plt.grid()

    iplt.show()
Example #40
def main():
    fname = iris.sample_data_path("colpex.pp")

    # The list of phenomena of interest
    phenomena = ["air_potential_temperature", "air_pressure"]

    # Define the constraint on standard name and model level
    constraints = [
        iris.Constraint(phenom, model_level_number=1) for phenom in phenomena
    ]

    air_potential_temperature, air_pressure = iris.load_cubes(
        fname, constraints
    )

    # Define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(1000, long_name="P0", units="hPa")
    # Convert reference pressure 'p0' into the same units as 'air_pressure'
    p0.convert_units(air_pressure.units)

    # Calculate Exner pressure
    exner_pressure = (air_pressure / p0) ** (287.05 / 1005.0)
    # Set the name (the unit is scalar)
    exner_pressure.rename("exner_pressure")

    # Calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # Set the name (the unit is K)
    air_temperature.rename("air_temperature")

    # Now create an iterator which will give us lat lon slices of
    # exner pressure and air temperature in the form
    # (exner_slice, air_temp_slice).
    lat_lon_slice_pairs = iris.iterate.izip(
        exner_pressure,
        air_temperature,
        coords=["grid_latitude", "grid_longitude"],
    )

    # For the purposes of this example, we only want to demonstrate the first
    # plot.
    lat_lon_slice_pairs = [next(lat_lon_slice_pairs)]

    plt.figure(figsize=(8, 4))
    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)

        # The default colorbar has a few too many ticks on it, causing text to
        # overlap. Therefore, limit the number of ticks.
        limit_colorbar_ticks(cont)

        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)
        iplt.show()
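
limit_colorbar_ticks is another helper defined in the original script rather than here. A sketch consistent with how it is used above, reducing the number of colorbar ticks via a MaxNLocator (the exact tick count is an assumption):

import matplotlib.ticker


def limit_colorbar_ticks(contour_object):
    # Limit the colorbar attached to `contour_object` to a handful of
    # ticks so that neighbouring labels do not overlap.
    colorbar = contour_object.colorbar
    colorbar.locator = matplotlib.ticker.MaxNLocator(4)
    colorbar.update_ticks()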
Example #41
def main():
    # Enable a future option, to ensure that the netcdf load works the same way
    # as in future Iris versions.
    iris.FUTURE.netcdf_promote = True

    # Load the monthly-valued Southern Oscillation Index (SOI) time-series.
    fname = iris.sample_data_path('SOI_Darwin.nc')
    soi = iris.load_cube(fname)

    # Window length for filters.
    window = 121

    # Construct 2-year (24-month) and 7-year (84-month) low pass filters
    # for the SOI data which is monthly.
    wgts24 = low_pass_weights(window, 1. / 24.)
    wgts84 = low_pass_weights(window, 1. / 84.)

    # Apply each filter using the rolling_window method used with the weights
    # keyword argument. A weighted sum is required because the magnitude of
    # the weights are just as important as their relative sizes.
    soi24 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts24),
                               weights=wgts24)
    soi84 = soi.rolling_window('time',
                               iris.analysis.SUM,
                               len(wgts84),
                               weights=wgts84)

    # Plot the SOI time series and both filtered versions.
    plt.figure(figsize=(9, 4))
    iplt.plot(soi,
              color='0.7',
              linewidth=1.,
              linestyle='-',
              alpha=1.,
              label='no filter')
    iplt.plot(soi24,
              color='b',
              linewidth=2.,
              linestyle='-',
              alpha=.7,
              label='2-year filter')
    iplt.plot(soi84,
              color='r',
              linewidth=2.,
              linestyle='-',
              alpha=.7,
              label='7-year filter')
    plt.ylim([-4, 4])
    plt.title('Southern Oscillation Index (Darwin Only)')
    plt.xlabel('Time')
    plt.ylabel('SOI')
    plt.legend(fontsize=10)
    iplt.show()
Example #42
def main():
    fname = iris.sample_data_path('air_temp.pp')

    temperature_orig = iris.load_cube(fname)

    temperature_noisy = copy.deepcopy(temperature_orig)
    sx = temperature_orig.data.shape[0]
    sy = temperature_orig.data.shape[1]
    gaussian_noise = np.random.normal(loc=0.0, scale=5, size=(sx, sy))
    # Use integer division so the slice indices stay integers.
    gaussian_noise[:sx // 4, :] = 0
    gaussian_noise[(3 * sx) // 4:, :] = 0
    gaussian_noise[:, sy // 4:(3 * sy) // 4] = 0
    temperature_noisy.data = 2 * temperature_noisy.data + gaussian_noise

    #Original data
    plt.figure(figsize=(12, 5))
    plt.subplot(221)
    qplt.contourf(temperature_orig, 15)
    plt.gca().coastlines()

    #Noisy data
    plt.subplot(222)
    qplt.contourf(temperature_noisy, 15)
    plt.gca().coastlines()

    # Plot scatter
    scatter_x = temperature_orig.data.flatten()
    scatter_y = temperature_noisy.data.flatten()
    plt.subplot(223)
    plt.plot(scatter_x, scatter_y, '.', label="scatter")
    coeffs = np.polyfit(scatter_x, scatter_y, 1)
    print(coeffs)
    plt.title("Scatter plot")
    plt.xlabel("orig [K]")
    plt.ylabel("noisy [K]")
    fitted_y = np.polyval(coeffs, scatter_x)
    plt.plot(scatter_x, fitted_y, 'k', label="fit")
    plt.text(np.min(scatter_x), np.max(fitted_y),
             "\nax+b\na=%.3f\nb=%.4g" % (coeffs[0], coeffs[1]))

    # Plot the residual from the fitted line as a map
    diff_y = scatter_y - fitted_y
    temperature_diff = copy.deepcopy(temperature_orig)
    temperature_diff.data = diff_y.reshape(temperature_noisy.data.shape)
    temperature_diff.standard_name = None
    temperature_diff.long_name = "Residual from fitted curve"
    temperature_diff.var_name = "Hello_World"


    plt.subplot(224)
    qplt.contourf(temperature_diff, 15)
    plt.gca().coastlines()



    iplt.show()
Example #43
def main():

    # load the monthly-valued Southern Oscillation Index (SOI) time-series
    fname = iris.sample_data_path('SOI_Darwin.nc')
    soi = iris.load_cube(fname)

    # window length for filters
    window = 121

    # construct 2-year (24-month) and 7-year (84-month) low pass filters
    # for the SOI data which is monthly
    wgts24 = low_pass_weights(window, 1. / 24.)
    wgts84 = low_pass_weights(window, 1. / 84.)

    # apply the filters using the rolling_window method with the weights
    # keyword argument
    soi24 = soi.rolling_window('time',
                               iris.analysis.MEAN,
                               len(wgts24),
                               weights=wgts24)
    soi84 = soi.rolling_window('time',
                               iris.analysis.MEAN,
                               len(wgts84),
                               weights=wgts84)

    # plot the SOI time series and both filtered versions
    fig = plt.figure(figsize=(9, 4))
    iplt.plot(soi,
              coords=['time'],
              color='0.7',
              linewidth=1.,
              linestyle='-',
              alpha=1.,
              label='no filter')
    iplt.plot(soi24,
              coords=['time'],
              color='b',
              linewidth=2.,
              linestyle='-',
              alpha=.7,
              label='2-year filter')
    iplt.plot(soi84,
              coords=['time'],
              color='r',
              linewidth=2.,
              linestyle='-',
              alpha=.7,
              label='7-year filter')
    plt.ylim([-4, 4])
    plt.title('Southern Oscillation Index (Darwin Only)')
    plt.xlabel('Time')
    plt.ylabel('SOI')
    plt.legend(fontsize=10)
    plt.show()
Example #44
def main():
    fname = iris.sample_data_path('colpex.pp')
    
    # The list of phenomena of interest
    phenomena = ['air_potential_temperature', 'air_pressure']
    
    # Define the constraint on standard name and model level
    constraints = [iris.Constraint(phenom, model_level_number=1) for
                   phenom in phenomena]
    
    air_potential_temperature, air_pressure = iris.load_cubes(fname,
                                                              constraints) 
    
    # Define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(1000, long_name='P0', units='hPa')
    # Convert reference pressure 'p0' into the same units as 'air_pressure'
    p0.convert_units(air_pressure.units)
    
    # Calculate Exner pressure
    exner_pressure = (air_pressure / p0) ** (287.05 / 1005.0)
    # Set the name (the unit is scalar)
    exner_pressure.rename('exner_pressure')
    
    # Calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # Set the name (the unit is K)
    air_temperature.rename('air_temperature')
    
    # Now create an iterator which will give us lat lon slices of
    # exner pressure and air temperature in the form
    # (exner_slice, air_temp_slice).
    lat_lon_slice_pairs = iris.iterate.izip(exner_pressure,
                                            air_temperature,
                                            coords=['grid_latitude',
                                                    'grid_longitude'])

    plt.figure(figsize=(8, 4))
    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)
    
        # The default colorbar has a few too many ticks on it, causing text to
        # overlap. Therefore, limit the number of ticks.
        limit_colorbar_ticks(cont)
    
        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)
        plt.show()
    
        # For the purposes of this example, break after the first loop - we
        # only want to demonstrate the first plot.
        break
Example #45
def main():
    # Enable a future option, to ensure that the grib load works the same way
    # as in future Iris versions.
    iris.FUTURE.strict_grib_load = True

    file_path = iris.sample_data_path('polar_stereo.grib2')
    cube = iris.load_cube(file_path)
    qplt.contourf(cube)
    ax = plt.gca()
    ax.coastlines()
    ax.gridlines()
    iplt.show()
def main():
    fname = iris.sample_data_path('rotated_pole.nc')
    temperature = iris.load_strict(fname)
    
    # Calculate the lat lon range and buffer it by 10 degrees
    lat_range, lon_range = iris.analysis.cartography.lat_lon_range(temperature)
    lat_range = lat_range[0] - 10, lat_range[1] + 10
    lon_range = lon_range[0] - 10, lon_range[1] + 10

    
    # Plot #1: Point plot showing data values & a colorbar
    plt.figure()
    iplt.map_setup(temperature, lat_range=lat_range, lon_range=lon_range)
    points = qplt.points(temperature, c=temperature.data)
    cb = plt.colorbar(points, orientation='horizontal')
    cb.set_label(temperature.units)
    iplt.gcm().drawcoastlines()
    plt.show()
    
    
    # Plot #2: Contourf of the point based data
    plt.figure()
    iplt.map_setup(temperature, lat_range=lat_range, lon_range=lon_range)
    qplt.contourf(temperature, 15)
    iplt.gcm().drawcoastlines()
    plt.show()
    
    
    # Plot #3: Contourf overlayed by coloured point data
    plt.figure()
    iplt.map_setup(temperature, lat_range=lat_range, lon_range=lon_range)
    qplt.contourf(temperature)
    iplt.points(temperature, c=temperature.data)
    iplt.gcm().drawcoastlines()
    plt.show()
    
    
    
    # For the purposes of this example, add some bounds to the latitude and longitude
    temperature.coord('grid_latitude').guess_bounds()
    temperature.coord('grid_longitude').guess_bounds()
    
    
    # Plot #4: Block plot
    plt.figure()
    iplt.map_setup(temperature, lat_range=lat_range, lon_range=lon_range)
    iplt.pcolormesh(temperature)
    iplt.gcm().bluemarble()
    iplt.gcm().drawcoastlines()
    plt.show()
Example #47
    def test_trajectory(self):
        cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))

        # extract a trajectory
        xpoint = cube.coord('longitude').points[:10]
        ypoint = cube.coord('latitude').points[:10]
        sample_points = [('latitude', xpoint), ('longitude', ypoint)]
        traj = iris.analysis.trajectory.interpolate(cube, sample_points)

        # save, reload and check
        with self.temp_filename(suffix='.nc') as temp_filename:
            iris.save(traj, temp_filename)
            reloaded = iris.load_cube(temp_filename)
            self.assertCML(reloaded, ('netcdf', 'save_load_traj.cml'))
Example #49
def main():
    fname = iris.sample_data_path('colpex.pp')

    # the list of phenomena of interest
    phenomena = ['air_potential_temperature', 'air_pressure']

    # define the constraint on standard name and model level
    constraints = [
        iris.Constraint(phenom, model_level_number=1) for phenom in phenomena
    ]

    air_potential_temperature, air_pressure = iris.load_strict(
        fname, constraints)

    # define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(100000, long_name='P0', units='Pa')

    # calculate Exner pressure
    exner_pressure = (air_pressure / p0)**(287.05 / 1005.0)
    # set the standard name (the unit is scalar)
    exner_pressure.rename('exner_pressure')

    # calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # set phenomenon definition and unit
    air_temperature.standard_name = 'air_temperature'
    air_temperature.units = 'K'

    # Now create an iterator which will give us lat lon slices of exner pressure and air temperature in
    # the form [exner_slice, air_temp_slice]
    lat_lon_slice_pairs = itertools.izip(
        exner_pressure.slices(['grid_latitude', 'grid_longitude']),
        air_temperature.slices(['grid_latitude', 'grid_longitude']))
    plt.figure(figsize=(8, 4))

    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)

        # The default colorbar has a few too many ticks on it, causing text to overlap. Therefore, limit the number of ticks
        limit_colorbar_ticks(cont)

        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)
        plt.show()

        # For the purposes of this example, break after the first loop - we only want to demonstrate the first plot
        break
Example #50
def draw(geoaxes):
    import iris
    import iris.plot as iplt

    # Add some high-resolution coastlines so we can produce nice results
    # even when zoomed a long way in.
    geoaxes.coastlines('10m')

    fname = iris.sample_data_path('rotated_pole.nc')
    temperature = iris.load_cube(fname)
    iplt.pcolormesh(temperature)

    # Do the initial draw so that the coastlines are projected
    # (the slow part).
    plt.draw()
Example #51
def main():
    # Load the "total electron content" cube.
    filename = iris.sample_data_path('space_weather.nc')
    cube = iris.load_cube(filename, 'total electron content')

    # Explicitly mask negative electron content.
    cube.data = ma.masked_less(cube.data, 0)

    # Plot the cube using one hundred colour levels.
    qplt.contourf(cube, 100)
    plt.title('Total Electron Content')
    plt.xlabel('longitude / degrees')
    plt.ylabel('latitude / degrees')
    plt.gca().stock_img()
    plt.gca().coastlines()
    iplt.show()
def main():
    fname = iris.sample_data_path('hybrid_height.nc')
    theta = iris.load_strict(fname)
    
    # Extract a single height vs longitude cross-section. N.B. This could easily be changed to
    # extract a specific slice, or even to loop over *all* cross section slices.
    cross_section = theta.slices(['grid_longitude', 'model_level_number']).next()
    
    qplt.contourf(cross_section, coords=['grid_longitude', 'altitude'])
    plt.show()
    
    # Now do the equivalent plot, only against model level
    plt.figure()
    
    qplt.contourf(cross_section, coords=['grid_longitude', 'model_level_number'])
    plt.show()