Example #1
    def test_custom_rules(self):
        # Test custom rule evaluation.
        # Default behaviour
        data_path = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
        cube = iris.load_strict(data_path)
        self.assertEqual(cube.standard_name, 'air_temperature')

        # Custom behaviour
        temp_path = iris.util.create_temp_filename()
        f = open(temp_path, 'w')
        f.write('\n'.join((
            'IF',
            'f.lbuser[3] == 16203',
            'THEN',
            'CMAttribute("standard_name", None)', 
            'CMAttribute("long_name", "customised")'))) 
        f.close()
        iris.fileformats.pp.add_load_rules(temp_path)
        cube = iris.load_strict(data_path)
        self.assertEqual(cube.name(), 'customised')
        os.remove(temp_path)
        
        # Back to default
        iris.fileformats.pp.reset_load_rules()
        cube = iris.load_strict(data_path)
        self.assertEqual(cube.standard_name, 'air_temperature')
Example #2
 def test_invalid_return_type_callback(self):
     def invalid_callback(cube, field, filename):
         return "Not valid to return a string"
     fname = tests.get_data_path(["PP", "trui", "air_temp_init", "200812011200__qwqu12ff.initanl.pp"])
     
     with self.assertRaises(TypeError):
         iris.load_strict(fname, callback=invalid_callback) 
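For contrast with the invalid callback above, a valid load callback mutates the cube in place and returns None, or raises iris.exceptions.IgnoreCubeException to skip a field entirely. A minimal sketch, not taken from the test suite (the lbft check is purely illustrative):

def valid_callback(cube, field, filename):
    # Keep only T+24 forecast fields; anything else is skipped rather than loaded.
    if field.lbft != 24:
        raise iris.exceptions.IgnoreCubeException
    # Otherwise annotate the cube in place and return None.
    cube.attributes['source_file'] = filename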
Example #3
    def test_pp_callback(self):
        fname = tests.get_data_path(["PP", "trui", "air_temp_T24", "200812011200__qwqg12ff.T24.pp"])
        cube = iris.load_strict(fname, callback=truipp_filename_callback)
        self.assertCML(cube, ['uri_callback', 'trui_t24.cml'])

        fname = tests.get_data_path(["PP", "trui", "air_temp_init", "200812011200__qwqu12ff.initanl.pp"])
        cube = iris.load_strict(fname, callback=truipp_filename_callback)
        self.assertCML(cube, ['uri_callback', 'trui_init.cml'])
Example #4
def main():
    # Load data into three Cubes, one for each set of PP files
    e1 = iris.load_strict(iris.sample_data_path('E1_north_america.nc'))
    
    a1b = iris.load_strict(iris.sample_data_path('A1B_north_america.nc'))
    
    # Load the global pre-industrial mean temperature, and limit the domain to
    # the same North American region covered by e1 and a1b.
    north_america = iris.Constraint(longitude=lambda v: 225 <= v <= 315,
                                    latitude=lambda v: 15 <= v <= 60)
    pre_industrial = iris.load_strict(iris.sample_data_path('pre-industrial.pp'),
                                      north_america)
    
    pre_industrial_mean = pre_industrial.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    e1_mean = e1.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    a1b_mean = a1b.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    
    # Show ticks 30 years apart
    plt.gca().xaxis.set_major_locator(mdates.YearLocator(30))
    
    # Format the x-axis cursor readout to show the year
    plt.gca().format_xdata = mdates.DateFormatter('%Y')
    
    # Plot the datasets
    qplt.plot(e1_mean, coords=['time'], label='E1 scenario', lw=1.5, color='blue')
    qplt.plot(a1b_mean, coords=['time'], label='A1B-Image scenario', lw=1.5, color='red')
    
    # Draw a horizontal line showing the pre industrial mean
    plt.axhline(y=pre_industrial_mean.data, color='gray', linestyle='dashed', label='pre-industrial', lw=1.5)
    
    # Establish where a1b and e1 have the same data, i.e. the observations
    common = numpy.where(a1b_mean.data == e1_mean.data)[0]
    observed = a1b_mean[common]
    
    # Plot the observed data
    qplt.plot(observed, coords=['time'], label='observed', color='black', lw=1.5)
    
    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title('North American mean air temperature', fontsize=18)
    
    plt.xlabel('Time / year')
    
    plt.grid()

    iplt.show()
Example #5
    def test_process_flags(self):
        # Test that process flags are created for correct values of lbproc
        orig_file = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
   
        # Values that result in process flags attribute NOT being created
        omit_process_flags_values = (128, 4096, 8192)
        
        # Test single flag values
        for value, _ in iris.fileformats.pp.LBPROC_PAIRS:
            f = iris.fileformats.pp.load(orig_file).next()
            f.lbproc = value # set value

            # Write out pp file
            temp_filename = iris.util.create_temp_filename(".pp")
            f.save(open(temp_filename, 'wb'))
        
            # Load pp file
            cube = iris.load_strict(temp_filename)

            if value in omit_process_flags_values:
                # Check ukmo__process_flags attribute not created
                self.assertEqual(cube.attributes.get("ukmo__process_flags", None), None)
            else:
                # Check ukmo__process_flags attribute contains correct values
                self.assertIn(iris.fileformats.pp.lbproc_map[value], cube.attributes["ukmo__process_flags"])
        
            os.remove(temp_filename) 

        # Test multiple flag values
        multiple_bit_values = ((128, 64), (4096, 1024), (8192, 1024))
        
        # Maps lbproc value to the process flags that should be created
        multiple_map = {sum(x) : [iris.fileformats.pp.lbproc_map[y] for y in x] for x in multiple_bit_values}
        
        for bit_values in multiple_bit_values:
            f = iris.fileformats.pp.load(orig_file).next()
            f.lbproc = sum(bit_values) # set value

            # Write out pp file
            temp_filename = iris.util.create_temp_filename(".pp")
            f.save(open(temp_filename, 'wb'))
        
            # Load pp file
            cube = iris.load_strict(temp_filename)

            # Check the process flags created
            self.assertEqual(set(cube.attributes["ukmo__process_flags"]), set(multiple_map[sum(bit_values)]), "Mismatch between expected and actual process flags.")

            os.remove(temp_filename)
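The multiple-flag cases above rely on lbproc being a bit field: a combined operation is encoded as the sum of its single-bit values, and each contributing bit maps to one process-flag description. A minimal sketch of that relationship, assuming only the lbproc_map lookup the test itself uses (the bit meanings follow the UM LBPROC convention):

    # 128 (time mean) + 64 (zonal mean) encode a field processed by both
    # operations; the expected attribute holds one description per bit.
    combined_lbproc = 128 + 64   # == 192
    expected_flags = [iris.fileformats.pp.lbproc_map[bit] for bit in (128, 64)]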
Example #6
    def test_meanTrial_diff(self):
        air_temp_T00_cube = iris.load_strict(tests.get_data_path(('PP', 'trui', 'air_temp_init', '*.pp')))
        self.assertCML(air_temp_T00_cube, ['trui', 'air_temp_T00.cml'])
        
        air_temp_T24_cube = iris.load_strict(tests.get_data_path(('PP', 'trui', 'air_temp_T24', '*.T24.pp')))
        
        self.assertCML(air_temp_T24_cube, ['trui', 'air_temp_T24.cml'])

        air_temp_T00_cube, air_temp_T24_cube = iris.analysis.maths.intersection_of_cubes(air_temp_T00_cube, air_temp_T24_cube)
        
        delta_cube = air_temp_T00_cube - air_temp_T24_cube
        self.assertCML(delta_cube, ('trui', 'air_temp_trial_diff_T00_to_T24.cml'))
        
        mean_delta_cube = delta_cube.collapsed("time", iris.analysis.MEAN)
        self.assertCML(mean_delta_cube, ('trui', 'mean_air_temp_trial_diff_T00_to_T24.cml'))
Example #7
def main():
    fname = iris.sample_data_path('air_temp.pp')
    
    # Load exactly one cube from the given file
    temperature = iris.load_strict(fname)
    
    # We only want a small number of latitudes, so filter some out using "extract".
    temperature = temperature.extract(iris.Constraint(latitude=lambda cell: 68 <= cell < 78))

    for cube in temperature.slices('longitude'):  
        
        # Create a string label to identify this cube (i.e. latitude: value)
        cube_label = 'latitude: %s' % cube.coord('latitude').points[0]
        
        # Plot the cube, and associate it with a label
        qplt.plot(cube, label=cube_label)
    
    # Add the legend with 2 columns
    plt.legend(ncol=2)
    
    # Put a grid on the plot
    plt.grid(True)

    # tell matplotlib not to extend the plot axes range to nicely rounded numbers.
    plt.axis('tight')

    # Finally, show it.
    plt.show()
Example #8
    def system_test_supported_filetypes(self):
        nx, ny = 60, 60
        dataarray = np.arange(nx * ny, dtype='>f4').reshape(nx, ny)

        laty = np.linspace(0, 59, ny)
        lonx = np.linspace(30, 89, nx)

        horiz_cs = lambda : iris.coord_systems.LatLonCS(
                        iris.coord_systems.SpheroidDatum("spherical", 6371229.0, flattening=0.0, units=iris.unit.Unit('m')),
                        iris.coord_systems.PrimeMeridian(label="Greenwich", value=0.0),
                        iris.coord_systems.GeoPosition(90.0, 0.0), 0.0)

        cm = iris.cube.Cube(data=dataarray, long_name="System test data", units='m s-1')
        cm.add_dim_coord(
            iris.coords.DimCoord(laty, 'latitude', units='degrees',
                                 coord_system=horiz_cs()),
            0)
        cm.add_dim_coord(
            iris.coords.DimCoord(lonx, 'longitude', units='degrees',
                coord_system=horiz_cs()),
            1)
        cm.add_aux_coord(iris.coords.AuxCoord(9, 'forecast_period', units='hours'))
        hours_since_epoch = iris.unit.Unit('hours since epoch', iris.unit.CALENDAR_GREGORIAN)
        cm.add_aux_coord(iris.coords.AuxCoord(3, 'time', units=hours_since_epoch))
        cm.add_aux_coord(iris.coords.AuxCoord(99, long_name='pressure', units='Pa'))
 
        cm.assert_valid()

        for filetype in ('.nc', '.pp', '.grib2'):
            saved_tmpfile = iris.util.create_temp_filename(suffix=filetype)
            iris.save(cm, saved_tmpfile)

            new_cube = iris.load_strict(saved_tmpfile)
        
            self.assertCML(new_cube, ('system', 'supported_filetype_%s.cml' % filetype))
Example #9
def main():
    fname = iris.sample_data_path('NAME_output.txt')

    boundary_volc_ash_constraint = iris.Constraint(
        'VOLCANIC_ASH_AIR_CONCENTRATION', flight_level='From FL000 - FL200')

    # Callback shown as None to illustrate where a cube-level callback function would be used if required
    cube = iris.load_strict(fname, boundary_volc_ash_constraint, callback=None)

    current_map = iplt.map_setup(lon_range=[-70, 20],
                                 lat_range=[20, 75],
                                 resolution='i')

    current_map.drawcoastlines()

    iplt.contourf(cube,
                  levels=(0.0002, 0.002, 0.004, 1),
                  colors=('#80ffff', '#939598', '#e00404'),
                  extend='max')

    time = cube.coord('time')
    time_date = time.units.num2date(time.points[0]).strftime(UTC_format)
    plt.title('Volcanic ash concentration forecast\nvalid at %s' % time_date)

    plt.show()
Example #10
 def test_grib_callback(self):
     def grib_thing_getter(cube, field, filename):
         cube.add_aux_coord(iris.coords.AuxCoord(field.extra_keys['_periodStartDateTime'], long_name='random element', units='no_unit'))
         
     fname = tests.get_data_path(('GRIB', 'global_t', 'global.grib2'))
     cube = iris.load_strict(fname, callback=grib_thing_getter)
     self.assertCML(cube, ['uri_callback', 'grib_global.cml'])
Example #11
def main():
    fname = iris.sample_data_path('ostia_monthly.nc')
    
    # load a single cube of surface temperature between +/- 5 latitude
    cube = iris.load_strict(fname, iris.Constraint('surface_temperature', latitude=lambda v: -5 < v < 5))
    
    # Take the mean over latitude
    cube = cube.collapsed('latitude', iris.analysis.MEAN)
    
    # Now that we have our data in a nice way, let's create the plot
    # contour with 20 levels
    qplt.contourf(cube, 20)
    
    # Put a custom label on the y axis 
    plt.ylabel('Time / years')
    
    # Stop matplotlib providing clever axes range padding
    plt.axis('tight')
    
    # As we are plotting annual variability, put years as the y ticks
    plt.gca().yaxis.set_major_locator(mdates.YearLocator())
    
    # And format the ticks to just show the year
    plt.gca().yaxis.set_major_formatter(mdates.DateFormatter('%Y'))
    
    plt.show()
Example #13
 def setUp(self):
     self.airtemp, self.humidity = iris.load_strict(iris.tests.get_data_path(('PP', 'globClim1', 'dec_subset.pp')), 
                                                    ['air_potential_temperature', 'specific_humidity'])
     # Reduce the size of cubes to make tests run a bit quicker
     self.airtemp = self.airtemp[0:5, 0:10, 0:12]
     self.humidity = self.humidity[0:5, 0:10, 0:12]
     self.coords = ['latitude', 'longitude']
Example #14
def main():
    fname = iris.sample_data_path('air_temp.pp')
    temperature = iris.load_strict(fname)
    
    qplt.contourf(temperature, 15)
    iplt.gcm().drawcoastlines()
    plt.show()
Example #16
    def test_cell_methods(self):
        # Test cell methods are created for correct values of lbproc
        orig_file = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
        
        # Values that result in cell methods being created
        cell_method_values = {128 : "mean", 4096 : "minimum", 8192 : "maximum"}
        
        # Make test values as list of single bit values and some multiple bit values
        single_bit_values = list(iris.fileformats.pp.LBPROC_PAIRS)
        multiple_bit_values = [(128 + 64, ""), (4096 + 2096, ""), (8192 + 1024, "")]
        test_values = list(single_bit_values) + multiple_bit_values
        
        for value, _ in test_values:
            f = iris.fileformats.pp.load(orig_file).next()
            f.lbproc = value # set value

            # Write out pp file
            temp_filename = iris.util.create_temp_filename(".pp")
            f.save(open(temp_filename, 'wb'))
        
            # Load pp file
            cube = iris.load_strict(temp_filename)
        
            if value in cell_method_values:
                # Check for cell method on cube
                self.assertEqual(cube.cell_methods[0].method, cell_method_values[value])
            else:
                # Check no cell method was created for values other than 128, 4096, 8192
                self.assertEqual(len(cube.cell_methods), 0)
        
            os.remove(temp_filename)   
Example #17
 def test_deferred_loading(self):
     # Test exercising CF-netCDF deferred loading and deferred slicing.
     # shape (31, 161, 320)
     cube = iris.load_strict(tests.get_data_path(('NetCDF', 'global', 'xyt', 'SMALL_total_column_co2.nc')))
     
     # Consecutive index on same dimension.
     self.assertCML(cube[0], ('netcdf', 'netcdf_deferred_index_0.cml'))
     self.assertCML(cube[0][0], ('netcdf', 'netcdf_deferred_index_1.cml'))
     self.assertCML(cube[0][0][0], ('netcdf', 'netcdf_deferred_index_2.cml'))
     
     # Consecutive slice on same dimension.
     self.assertCML(cube[0:20], ('netcdf', 'netcdf_deferred_slice_0.cml'))
     self.assertCML(cube[0:20][0:10], ('netcdf', 'netcdf_deferred_slice_1.cml'))
     self.assertCML(cube[0:20][0:10][0:5], ('netcdf', 'netcdf_deferred_slice_2.cml'))
     
     # Consecutive tuple index on same dimension.
     self.assertCML(cube[(0, 8, 4, 2, 14, 12), ], ('netcdf', 'netcdf_deferred_tuple_0.cml'))
     self.assertCML(cube[(0, 8, 4, 2, 14, 12), ][(0, 2, 4, 1), ], ('netcdf', 'netcdf_deferred_tuple_1.cml'))
     subcube = cube[(0, 8, 4, 2, 14, 12), ][(0, 2, 4, 1), ][(1, 3), ]
     self.assertCML(subcube, ('netcdf', 'netcdf_deferred_tuple_2.cml'))
     
     # Consecutive mixture on same dimension.
     self.assertCML(cube[0:20:2][(9, 5, 8, 0), ][3], ('netcdf', 'netcdf_deferred_mix_0.cml'))
     self.assertCML(cube[(2, 7, 3, 4, 5, 0, 9, 10), ][2:6][3], ('netcdf', 'netcdf_deferred_mix_0.cml'))
     self.assertCML(cube[0][(0, 2), (1, 3)], ('netcdf', 'netcdf_deferred_mix_1.cml'))
Example #18
    def test_extended_proxy_data(self):
        # Get the empty theta cubes for T+1.5 and T+2
        data_path = tests.get_data_path(('PP', 'COLPEX', 'theta_and_orog.pp'))
        phenom_constraint = iris.Constraint('air_potential_temperature')
        forecast_period_constraint1 = iris.Constraint(forecast_period=1.1666666753590107)
        forecast_period_constraint2 = iris.Constraint(forecast_period=1.3333333320915699)
        forecast_period_constraint1_and_2 = iris.Constraint(forecast_period=lambda c: c in [1.1666666753590107, 1.3333333320915699])
        cube1 = iris.load_strict(data_path, phenom_constraint & forecast_period_constraint1)
        cube2 = iris.load_strict(data_path, phenom_constraint & forecast_period_constraint2)
        
        # Merge the two halves
        cubes = iris.cube.CubeList([cube1, cube2]).merge(True)
        self.assertCML(cubes, ('merge', 'theta_two_forecast_periods.cml'))

        # Make sure we get the same result directly from load
        cube = iris.load_strict(data_path, phenom_constraint & forecast_period_constraint1_and_2)
        self.assertCML(cube, ('merge', 'theta_two_forecast_periods.cml'))
Example #19
 def test_cube_pickle(self):
     cube = iris.load_strict(tests.get_data_path(('PP', 'globClim1', 'theta.pp')))
     self.assertCML(cube, ('cube_io', 'pickling', 'theta.cml'), checksum=False)
     
     for _, recon_cube in self.pickle_then_unpickle(cube):
         self.assertNotEqual(recon_cube._data_manager, None)
         self.assertEqual(cube._data_manager, recon_cube._data_manager)
         self.assertCML(recon_cube, ('cube_io', 'pickling', 'theta.cml'), checksum=False)
Example #20
    def test_process_flags(self):
        # Test single process flags
        for _, process_desc in iris.fileformats.pp.LBPROC_PAIRS[1:]:
            # Get basic cube and set process flag manually
            ll_cube = stock.lat_lon_cube()
            ll_cube.attributes["ukmo__process_flags"] = (process_desc,)
    
            # Save cube to netCDF
            temp_filename = iris.util.create_temp_filename(".nc")
            iris.save(ll_cube, temp_filename)

            # Reload cube
            cube = iris.load_strict(temp_filename)

            # Check correct number and type of flags
            self.assertEqual(len(cube.attributes["ukmo__process_flags"]), 1, "Mismatch in number of process flags.")
            process_flag = cube.attributes["ukmo__process_flags"][0]
            self.assertEqual(process_flag, process_desc)
            
            os.remove(temp_filename)

        # Test multiple process flags
        multiple_bit_values = ((128, 64), (4096, 1024), (8192, 1024))
        
        # Maps lbproc value to the process flags that should be created
        multiple_map = {bits : [iris.fileformats.pp.lbproc_map[bit] for bit in bits] for bits in multiple_bit_values}

        for bits, descriptions in multiple_map.iteritems():
        
            ll_cube = stock.lat_lon_cube()
            ll_cube.attributes["ukmo__process_flags"] = descriptions
            
            # Save cube to netCDF
            temp_filename = iris.util.create_temp_filename(".nc")
            iris.save(ll_cube, temp_filename)
            
            # Reload cube     
            cube = iris.load_strict(temp_filename)
            
            # Check correct number and type of flags
            process_flags = cube.attributes["ukmo__process_flags"]
            self.assertEqual(len(process_flags), len(bits), "Mismatch in number of process flags.")
            self.assertEqual(set(process_flags), set(descriptions))

            os.remove(temp_filename)
Example #21
    def test_fp_units(self):
        """Test different units for forecast period (just the ones we care about)."""
        cube = iris.load_strict(tests.get_data_path(('GRIB', 'fp_units', 'minutes.grib2')))
        self.assertEqual(cube.coord("forecast_period").units, "hours")
        self.assertEqual(cube.coord("forecast_period").points[0], 24)

        cube = iris.load_strict(tests.get_data_path(('GRIB', 'fp_units', 'hours.grib2')))
        self.assertEqual(cube.coord("forecast_period").units, "hours")
        self.assertEqual(cube.coord("forecast_period").points[0], 24)

        cube = iris.load_strict(tests.get_data_path(('GRIB', 'fp_units', 'days.grib2')))
        self.assertEqual(cube.coord("forecast_period").units, "hours")
        self.assertEqual(cube.coord("forecast_period").points[0], 24)
        
        cube = iris.load_strict(tests.get_data_path(('GRIB', 'fp_units', 'seconds.grib2')))
        self.assertEqual(cube.coord("forecast_period").units, "hours")
        self.assertEqual(cube.coord("forecast_period").points[0], 24)
Example #22
    def setUp(self):
        cube_path = tests.get_data_path(('PP', 'aPProt1', 'rotatedMHtimecube.pp'))
        cube = iris.load_strict(cube_path)[0]

        # Until there is better mapping support for rotated-pole, pretend this isn't rotated.
        # ie. Move the pole from (37.5, 177.5) to (90, 0) and bodge the coordinates.
        _pretend_unrotated(cube)

        self.cube = cube
Example #23
 def test_izip_missing_slice_coords(self):
     # Remove latitude coordinate from one of the cubes
     self.humidity.remove_coord('latitude')
     with self.assertRaises(iris.exceptions.CoordinateNotFoundError):
         iris.iterate.izip(self.airtemp, self.humidity, coords=self.coords)
     # Use a cube with grid_latitude and grid_longitude rather than latitude and longitude
     othercube = iris.load_strict(iris.tests.get_data_path(('PP', 'uk4', 'uk4par09.pp')), 'air_temperature')
     with self.assertRaises(iris.exceptions.CoordinateNotFoundError):
         iris.iterate.izip(self.airtemp, othercube, coords=self.coords)
Example #24
    def test_netcdf_save_ndim_auxiliary(self):
        # Test saving a CF-netCDF file with multi-dimensional auxiliary coordinates.
        # Read netCDF input file.
        file_in = tests.get_data_path(('NetCDF', 'rotated', 'xyt', 'new_rotPole_precipitation.nc'))
        cube = iris.load_strict(file_in)
        
        # Write Cube to netCDF file.
        file_out = iris.util.create_temp_filename(suffix='.nc')
        iris.save(cube, file_out)

        # Check the netCDF file against CDL expected output.
        self.assertCDL(file_out, ('netcdf', 'netcdf_save_ndim_auxiliary.cdl'))

        # Read the netCDF file.
        cube = iris.load_strict(file_out)

        # Check the netCDF read, write, read mechanism.
        self.assertCML(cube, ('netcdf', 'netcdf_save_load_ndim_auxiliary.cml'))

        os.remove(file_out)
Example #25
    def test_all(self):
        path = tests.get_data_path(('PP', 'ukVorog', 'ukv_orog_refonly.pp'))
        master_cube = iris.load_strict(path)

        # Check overall behaviour
        cube = master_cube[::10, ::10]
        self._check_both_conversions(cube)

        # Check numerical stability
        cube = master_cube[210:238, 424:450]
        self._check_both_conversions(cube)
Example #26
    def test_netcdf_hybrid_height(self):
        # Test saving a CF-netCDF file which contains an atmosphere hybrid height (dimensionless vertical) coordinate.
        # Read PP input file.
        file_in = tests.get_data_path(('PP', 'COLPEX', 'theta_and_orog.pp'))
        cube = iris.load_strict(file_in, 'air_potential_temperature')

        # Write Cube to netCDF file.
        file_out = iris.util.create_temp_filename(suffix='.nc')
        iris.save(cube, file_out)

        # Check the netCDF file against CDL expected output.
        self.assertCDL(file_out, ('netcdf', 'netcdf_save_hybrid_height.cdl'))

        # Read netCDF file.
        cube = iris.load_strict(file_out)

        # Check the PP read, netCDF write, netCDF read mechanism.
        self.assertCML(cube, ('netcdf', 'netcdf_save_load_hybrid_height.cml'))

        os.remove(file_out)
Example #27
def load_strict_once(filename, constraint):
    """Same syntax as load_strict, but will only load a file once, then cache the answer in a dictionary."""
    global _load_strict_once_cache
    key = (filename, str(constraint))
    cube = _load_strict_once_cache.get(key, None)
    
    if cube is None:
        cube = iris.load_strict(filename, constraint)
        _load_strict_once_cache[key] = cube
        
    return cube
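A usage sketch for the helper above, assuming the module initialises the cache as a plain dict (e.g. _load_strict_once_cache = {}): repeated calls with the same filename and constraint return the already-loaded cube object without touching the file again.

    _load_strict_once_cache = {}

    theta = load_strict_once('theta.pp', 'air_potential_temperature')        # reads the file
    theta_again = load_strict_once('theta.pp', 'air_potential_temperature')  # returned from the cache
    assert theta is theta_again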
Example #28
def _load_theta():
    path = tests.get_data_path(('PP', 'COLPEX', 'theta_and_orog_subset.pp'))
    theta = iris.load_strict(path, 'air_potential_temperature')
    
    # Improve the unit
    theta.units = 'K'

    # Until there is better mapping support for rotated-pole, pretend this isn't rotated.
    # ie. Move the pole from (37.5, 177.5) to (90, 0) and adjust the coordinates.
    tests.test_mapping._pretend_unrotated(theta)

    return theta
Example #29
def COP_1d(target_dir):
    for scenario in ['E1', 'A1B']:
        fname = os.path.join(DATA_ZOO, 'PP', 'A1B-Image_E1', scenario, 
                             '*.pp'
                             )
        cube = iris.load_strict(fname)
        cube = cube.extract(iris.Constraint(longitude=lambda v: 225 <= v <= 315,
                                            latitude=lambda v: 15 <= v <= 60))
        cube.attributes['Model scenario'] = scenario
        iris.save(cube, os.path.join(target_dir, '%s_north_america.nc' % scenario))
Example #30
def ukV2_in_userguide(target_dir):
    fname = os.path.join(DATA_ZOO, 'PP', 'ukV2', 'THOxayrk.pp')
    sa = 'm01s00i033'
    ap = 'm01s00i004'
    pt, sa = iris.load_strict(fname, ['air_potential_temperature', 'surface_altitude'])
    # extract, via indices, an area over the north of England
    pt, sa = [cube[..., 290:494, 190:377] for cube in [pt, sa]]
    # remove the temporal dimension of the surface altitude
    sa = sa[0, ...]
    # reduce the height to the first 21 levels (every third)
    pt = pt[:, :21:3, ...]
    cubes = [pt, sa]
    iris.save(cubes, os.path.join(target_dir, 'uk_hires.pp'))
Example #31
    def test_netcdf_save_single(self):
        # Test saving a single CF-netCDF file.
        # Read PP input file. 
        file_in = tests.get_data_path(('PP', 'cf_processing', '000003000000.03.236.000128.1990.12.01.00.00.b.pp'))
        cube = iris.load_strict(file_in)
        
        # Write Cube to netCDF file.
        file_out = iris.util.create_temp_filename(suffix='.nc')
        iris.save(cube, file_out)

        # Check the netCDF file against CDL expected output.
        self.assertCDL(file_out, ('netcdf', 'netcdf_save_single.cdl'))
        os.remove(file_out)
Example #32
def main():
    fname = iris.sample_data_path('rotated_pole.nc')
    temperature = iris.load_strict(fname)
    
    # Calculate the lat lon range and buffer it by 10 degrees
    lat_range, lon_range = iris.analysis.cartography.lat_lon_range(temperature)
    lat_range = lat_range[0] - 10, lat_range[1] + 10
    lon_range = lon_range[0] - 10, lon_range[1] + 10

    # Plot #1: Point plot showing data values & a colorbar
    plt.figure()
    iplt.map_setup(temperature, lat_range=lat_range, lon_range=lon_range)
    points = qplt.points(temperature, c=temperature.data)
    cb = plt.colorbar(points, orientation='horizontal')
    cb.set_label(temperature.units)
    iplt.gcm().drawcoastlines()
    plt.show()

    # Plot #2: Contourf of the point based data
    plt.figure()
    iplt.map_setup(temperature, lat_range=lat_range, lon_range=lon_range)
    qplt.contourf(temperature, 15)
    iplt.gcm().drawcoastlines()
    plt.show()

    # Plot #3: Contourf overlaid by coloured point data
    plt.figure()
    iplt.map_setup(temperature, lat_range=lat_range, lon_range=lon_range)
    qplt.contourf(temperature)
    iplt.points(temperature, c=temperature.data)
    iplt.gcm().drawcoastlines()
    plt.show()

    # For the purposes of this example, add some bounds to the latitude and longitude
    temperature.coord('grid_latitude').guess_bounds()
    temperature.coord('grid_longitude').guess_bounds()

    # Plot #4: Block plot
    plt.figure()
    iplt.map_setup(temperature, lat_range=lat_range, lon_range=lon_range)
    iplt.pcolormesh(temperature)
    iplt.gcm().bluemarble()
    iplt.gcm().drawcoastlines()
    plt.show()
Example #33
    def test_colpex(self):
        # Load the COLPEX data => TZYX
        path = tests.get_data_path(('PP', 'COLPEX', 'theta_and_orog.pp'))
        
        phenom = iris.load_strict(path, 'air_potential_temperature')

        # Select a ZX cross-section.
        cross_section = phenom[0, :, 0, :]

        # Obtain the real-world heights
        altitude = cross_section.coord('altitude')
        self.assertEqual(altitude.shape, (70, 412))
        self.assertEqual(cross_section.coord_dims(altitude), (0, 1))
        self.assertEqual(zlib.crc32(altitude.points), -306406502)
Example #34
def main():
    fname = iris.sample_data_path('colpex.pp')

    # the list of phenomena of interest
    phenomena = ['air_potential_temperature', 'air_pressure']

    # define the constraint on standard name and model level
    constraints = [
        iris.Constraint(phenom, model_level_number=1) for phenom in phenomena
    ]

    air_potential_temperature, air_pressure = iris.load_strict(
        fname, constraints)

    # define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(100000, long_name='P0', units='Pa')

    # calculate Exner pressure
    exner_pressure = (air_pressure / p0)**(287.05 / 1005.0)
    # set the standard name (the unit is scalar)
    exner_pressure.rename('exner_pressure')

    # calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # set phenomenon definition and unit
    air_temperature.standard_name = 'air_temperature'
    air_temperature.units = 'K'

    # Now create an iterator which will give us lat lon slices of exner pressure and air temperature in
    # the form [exner_slice, air_temp_slice]
    lat_lon_slice_pairs = itertools.izip(
        exner_pressure.slices(['grid_latitude', 'grid_longitude']),
        air_temperature.slices(['grid_latitude', 'grid_longitude']))
    plt.figure(figsize=(8, 4))

    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)

        # The default colorbar has a few too many ticks on it, causing text to overlap. Therefore, limit the number of ticks
        limit_colorbar_ticks(cont)

        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)
        plt.show()

        # For the purposes of this example, break after the first loop - we only want to demonstrate the first plot
        break
Example #35
def main():
    fname = iris.sample_data_path('hybrid_height.nc')
    theta = iris.load_strict(fname)
    
    # Extract a single height vs longitude cross-section. N.B. This could easily be changed to
    # extract a specific slice, or even to loop over *all* cross section slices.
    cross_section = theta.slices(['grid_longitude', 'model_level_number']).next()
    
    qplt.contourf(cross_section, coords=['grid_longitude', 'altitude'])
    plt.show()
    
    # Now do the equivalent plot, only against model level
    plt.figure()
    
    qplt.contourf(cross_section, coords=['grid_longitude', 'model_level_number'])
    plt.show()
Example #37
def main():
    # Load the "total electron content" cube.
    filename = iris.sample_data_path('space_weather.nc')
    cube = iris.load_strict(filename, 'total electron content')

    # Explicitly mask negative electron content.
    cube.data = np.ma.masked_less(cube.data, 0)

    # Currently require to remove the multi-dimensional
    # latitude and longitude coordinates for Iris plotting.
    cube.remove_coord('latitude')
    cube.remove_coord('longitude')

    # Plot the cube using one hundred colour levels.
    qplt.contourf(cube, 100)
    plt.title('Total Electron Content')
    plt.xlabel('longitude / degrees')
    plt.ylabel('latitude / degrees')
    iplt.gcm().bluemarble(zorder=-1)
    iplt.gcm().drawcoastlines()
    plt.show()
Example #38
def main():
    # extract surface temperature cubes which have an ensemble member coordinate, adding appropriate lagged ensemble metadata
    surface_temp = iris.load_strict(
        iris.sample_data_path('GloSea4', 'ensemble_???.pp'),
        iris.Constraint('surface_temperature', realization=lambda value: True),
        callback=realization_metadata,
    )

    # ----------------------------------------------------------------------------------------------------------------
    # Plot #1: Ensemble postage stamps
    # ----------------------------------------------------------------------------------------------------------------

    # for the purposes of this example, take the last time element of the cube
    last_timestep = surface_temp[:, -1, :, :]

    # Make 50 evenly spaced levels which span the dataset
    contour_levels = numpy.linspace(numpy.min(last_timestep.data),
                                    numpy.max(last_timestep.data), 50)

    # Create a wider than normal figure to support our many plots
    plt.figure(figsize=(12, 6), dpi=100)

    # Also manually adjust the spacings which are used when creating subplots
    plt.gcf().subplots_adjust(hspace=0.05,
                              wspace=0.05,
                              top=0.95,
                              bottom=0.05,
                              left=0.075,
                              right=0.925)

    # iterate over all possible latitude longitude slices
    for cube in last_timestep.slices(['latitude', 'longitude']):

        # get the ensemble member number from the ensemble coordinate
        ens_member = cube.coord('realization').points[0]

        # plot the data in a 4x4 grid, with each plot's position in the grid being determined by ensemble member number
        # the special case for the 13th ensemble member is to have the plot at the bottom right
        if ens_member == 13:
            plt.subplot(4, 4, 16)
        else:
            plt.subplot(4, 4, ens_member + 1)

        cf = iplt.contourf(cube, contour_levels)

        # add coastlines
        m = iplt.gcm()
        m.drawcoastlines()

    # make an axes to put the shared colorbar in
    colorbar_axes = plt.gcf().add_axes([0.35, 0.1, 0.3, 0.05])
    colorbar = plt.colorbar(cf, colorbar_axes, orientation='horizontal')
    colorbar.set_label('%s' % last_timestep.units)

    # limit the colorbar to 8 tick marks
    import matplotlib.ticker
    colorbar.locator = matplotlib.ticker.MaxNLocator(8)
    colorbar.update_ticks()

    # get the time for the entire plot
    time_coord = last_timestep.coord('time')
    time = time_coord.units.num2date(time_coord.points[0])

    # set a global title for the postage stamps with the date formatted by "monthname year"
    plt.suptitle('Surface temperature ensemble forecasts for %s' %
                 time.strftime('%B %Y'))

    iplt.show()

    # ----------------------------------------------------------------------------------------------------------------
    # Plot #2: ENSO plumes
    # ----------------------------------------------------------------------------------------------------------------

    # Nino 3.4 lies between: 170W and 120W, 5N and 5S, so define a constraint which matches this
    nino_3_4_constraint = iris.Constraint(
        longitude=lambda v: -170 + 360 <= v <= -120 + 360,
        latitude=lambda v: -5 <= v <= 5)

    nino_cube = surface_temp.extract(nino_3_4_constraint)

    # Subsetting a circular longitude coordinate always results in a circular coordinate, so set the coordinate to be non-circular
    nino_cube.coord('longitude').circular = False

    # Calculate the horizontal mean for the nino region
    mean = nino_cube.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)

    # Calculate the ensemble mean of the horizontal mean. To do this, remove the "forecast_period" and
    # "forecast_reference_time" coordinates which span both "relalization" and "time".
    mean.remove_coord("forecast_reference_time")
    mean.remove_coord("forecast_period")
    ensemble_mean = mean.collapsed('realization', iris.analysis.MEAN)

    # take the ensemble mean from each ensemble member
    mean -= ensemble_mean.data

    plt.figure()

    for ensemble_member in mean.slices(['time']):
        # draw each ensemble member as a dashed line in black
        iplt.plot(ensemble_member, '--k', coords=['time'])

    plt.title('Mean temperature anomaly for ENSO 3.4 region')
    plt.xlabel('Time')
    plt.ylabel('Temperature anomaly / K')

    plt.show()
Example #39
 def load_match(self, files, constraints):
     cubes = iris.load_strict(files, constraints)
     return cubes
Example #40
def main():
    # Load e1 and a1 using the callback to update the metadata
    e1 = iris.load_strict(iris.sample_data_path('E1.2098.pp'),
                          callback=cop_metadata_callback)
    a1b = iris.load_strict(iris.sample_data_path('A1B.2098.pp'),
                           callback=cop_metadata_callback)

    # Load the global average data and add an 'Experiment' coord to it
    global_avg = iris.load_strict(iris.sample_data_path('pre-industrial.pp'))

    # Define evenly spaced contour levels: -2.5, -1.5, ... 15.5, 16.5 with the specific colours
    levels = numpy.arange(20) - 2.5
    red = numpy.array([
        0, 0, 221, 239, 229, 217, 239, 234, 228, 222, 205, 196, 161, 137, 116,
        89, 77, 60, 51
    ]) / 256.
    green = numpy.array([
        16, 217, 242, 243, 235, 225, 190, 160, 128, 87, 72, 59, 33, 21, 29, 30,
        30, 29, 26
    ]) / 256.
    blue = numpy.array([
        255, 255, 243, 169, 99, 51, 63, 37, 39, 21, 27, 23, 22, 26, 29, 28, 27,
        25, 22
    ]) / 256.

    # Put those colours into an array which can be passed to contourf as the specific colours for each level
    colors = numpy.array([red, green, blue]).T

    # Subtract the global average from each scenario slice (the delta calculations inside the loop below)

    # Iterate over each latitude longitude slice for both e1 and a1b scenarios simultaneously
    for e1_slice, a1b_slice in itertools.izip(
            e1.slices(['latitude', 'longitude']),
            a1b.slices(['latitude', 'longitude'])):

        time_coord = a1b_slice.coord('time')

        # Calculate the difference from the mean
        delta_e1 = e1_slice - global_avg
        delta_a1b = a1b_slice - global_avg

        # Make a wider than normal figure to house two maps side-by-side
        fig = plt.figure(figsize=(12, 5))

        # Get the time datetime from the coordinate
        time = time_coord.units.num2date(time_coord.points[0])
        # Set a title for the entire figure, giving the time in a nice format of "MonthName Year". Also, set the y value for the
        # title so that it is not tight to the top of the plot.
        fig.suptitle('Annual Temperature Predictions for ' +
                     time.strftime("%Y"),
                     y=0.9,
                     fontsize=18)

        # Add the first subplot showing the E1 scenario
        plt.subplot(121)
        plt.title('HadGEM2 E1 Scenario', fontsize=10)
        iplt.contourf(delta_e1,
                      levels,
                      colors=colors,
                      linewidth=0,
                      extend='both')
        current_map = iplt.gcm()
        current_map.drawcoastlines()
        # get the current axes' subplot for use later on
        plt1_ax = plt.gca()

        # Add the second subplot showing the A1B scenario
        plt.subplot(122)
        plt.title('HadGEM2 A1B-Image Scenario', fontsize=10)
        contour_result = iplt.contourf(delta_a1b,
                                       levels,
                                       colors=colors,
                                       linewidth=0,
                                       extend='both')
        current_map = iplt.gcm()
        current_map.drawcoastlines()
        # get the current axes' subplot for use later on
        plt2_ax = plt.gca()

        # Now add a colourbar whose leftmost point is the same as the leftmost point of the left hand plot
        # and rightmost point is the rightmost point of the right hand plot

        # Get the positions of the 2nd plot and the left position of the 1st plot
        left, bottom, width, height = plt2_ax.get_position().bounds
        first_plot_left = plt1_ax.get_position().bounds[0]

        # the width of the colorbar should now be simple
        width = left - first_plot_left + width

        # Add axes to the figure, to place the colour bar
        colorbar_axes = fig.add_axes(
            [first_plot_left, bottom + 0.07, width, 0.03])

        # Add the colour bar
        cbar = plt.colorbar(contour_result,
                            colorbar_axes,
                            orientation='horizontal')

        # Label the colour bar and add ticks
        cbar.set_label(e1_slice.units)
        cbar.ax.tick_params(length=0)

        plt.show()
Example #41
import matplotlib.pyplot as plt

import iris
import iris.quickplot as qplt
import iris.plot as iplt

fname = iris.sample_data_path('air_temp.pp')
temperature_cube = iris.load_strict(fname)

# put bounds on the latitude and longitude coordinates
temperature_cube.coord('latitude').guess_bounds()
temperature_cube.coord('longitude').guess_bounds()

# Draw the temperature data as a pseudocolour (pcolormesh) plot
qplt.pcolormesh(temperature_cube)

# Get the map created by pcolormesh
current_map = iplt.gcm()

# Add coastlines to the map
current_map.drawcoastlines()

plt.show()