Example #1
 def test_too_many(self):
     paths = (
         tests.get_data_path(['PP', 'aPPglob1', 'global.pp']),
         tests.get_data_path(['PP', 'aPPglob1', 'gl?bal.pp'])
     )
     with self.assertRaises(iris.exceptions.ConstraintMismatchError):
         iris.load_cube(paths)
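For context: iris.load_cube succeeds only when the load yields exactly one cube, so two paths that match copies of the same field raise the error above. A minimal sketch of the distinction (hypothetical paths), falling back to iris.load, which returns however many cubes matched as a CubeList:

import iris
import iris.exceptions

paths = ['global.pp', 'duplicate.pp']  # hypothetical inputs
try:
    cube = iris.load_cube(paths)  # raises unless exactly one cube results
except iris.exceptions.ConstraintMismatchError:
    cubes = iris.load(paths)      # a CubeList; pick or merge explicitly
    cube = cubes[0]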
Example #2
 def test_too_many(self):
     paths = (
         tests.get_data_path(["PP", "aPPglob1", "global.pp"]),
         tests.get_data_path(["PP", "aPPglob1", "gl?bal.pp"]),
     )
     with self.assertRaises(iris.exceptions.ConstraintMismatchError):
         iris.load_cube(paths)
Example #3
 def setUp(self):
     self.profile = ap.Namespace(**conf.profiles["default"])
     self.data = serveupimage.loadCube(os.path.join(fileDir, "data", "test_input.nc"),
                                       conf.topog_file,
                                       self.profile.data_constraint)
     self.proced_data = iris.load_cube(os.path.join(fileDir, "data", "proced_data.nc"))
     self.tiled_data = iris.load_cube(os.path.join(fileDir, "data", "tiled_data.nc")).data
Example #4
def proc_cube(cube):
    for i, frt_cube in enumerate(cube.slices_over("time")):
        print("Processing timestep", i, "...", end=" ")
        img_array = imageproc.tileArray(frt_cube.data)
        img_array /= img_array.max()
        img_array *= 255
        print("Writing image")
        with open("data%03d.png" % i, "wb") as img:
            imageproc.writePng(img_array, img, nchannels=3, alpha=False)

    print("Writing video")
    # Subprocess arguments must all be strings, so default QUALITY to "2".
    sp.call(["avconv", "-y",
             "-r", "1", "-i", "data%03d.png",
             "-r", "1", "-vcodec", os.getenv("CODEC", "libtheora"),
             "-qscale:v", os.getenv("QUALITY", "2"),
             os.getenv("FILE_OUT", "out.ogv")])
    print("Cleaning up")
    fs = glob.glob("./data???.png")
    for f in fs:
        os.remove(f)

if __name__ == "__main__":
    varname = os.getenv("VAR_NAME")
    if varname is not None:
        cube = iris.load_cube(os.getenv("FILE_IN"), iris.Constraint(varname))
    else:
        cube = iris.load_cube(os.getenv("FILE_IN"))
    proc_cube(cube)
Example #5
 def test_invalid_signature_callback(self):
     def invalid_callback(cube, ):
         # should never get here
         pass
     fname = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
     with self.assertRaises(TypeError):
         iris.load_cube(fname, callback=invalid_callback)
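For contrast, a conforming callback takes three positional arguments, (cube, field, filename). The body below is only an illustrative sketch; the IgnoreCubeException filter is an assumption, not part of the test:

import iris
import iris.exceptions

def valid_callback(cube, field, filename):
    # Discard any cube we are not interested in; iris skips it silently.
    if cube.name() != 'air_temperature':
        raise iris.exceptions.IgnoreCubeException()

# cube = iris.load_cube(fname, callback=valid_callback)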
Example #6
    def test_custom_rules(self):
        # Test custom rule evaluation.
        # Default behaviour
        data_path = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
        cube = iris.load_cube(data_path)
        self.assertEqual(cube.standard_name, 'air_temperature')

        # Custom behaviour
        temp_path = iris.util.create_temp_filename()
        f = open(temp_path, 'w')
        f.write('\n'.join((
            'IF',
            'f.lbuser[3] == 16203',
            'THEN',
            'CMAttribute("standard_name", None)', 
            'CMAttribute("long_name", "customised")'))) 
        f.close()
        iris.fileformats.pp.add_load_rules(temp_path)
        cube = iris.load_cube(data_path)
        self.assertEqual(cube.name(), 'customised')
        os.remove(temp_path)
        
        # Back to default
        iris.fileformats.pp.reset_load_rules()
        cube = iris.load_cube(data_path)
        self.assertEqual(cube.standard_name, 'air_temperature')
Example #7
    def test_custom_rules(self):
        # Test custom rule evaluation.
        # Default behaviour
        data_path = tests.get_data_path(("PP", "aPPglob1", "global.pp"))
        cube = iris.load_cube(data_path)
        self.assertEqual(cube.standard_name, "air_temperature")

        # Custom behaviour
        temp_path = iris.util.create_temp_filename()
        f = open(temp_path, "w")
        f.write(
            "\n".join(
                (
                    "IF",
                    "f.lbuser[3] == 16203",
                    "THEN",
                    'CMAttribute("standard_name", None)',
                    'CMAttribute("long_name", "customised")',
                )
            )
        )
        f.close()
        iris.fileformats.pp.add_load_rules(temp_path)
        cube = iris.load_cube(data_path)
        self.assertEqual(cube.name(), "customised")
        os.remove(temp_path)

        # Back to default
        iris.fileformats.pp.reset_load_rules()
        cube = iris.load_cube(data_path)
        self.assertEqual(cube.standard_name, "air_temperature")
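Note that add_load_rules and reset_load_rules were deprecated in later iris releases; a load callback gives a similar per-field customisation. A rough sketch of the rule file above in callback form (names are illustrative):

def customise(cube, field, filename):
    # For PP loads, 'field' is the raw PP field, so the rule's
    # f.lbuser[3] test maps across directly.
    if field.lbuser[3] == 16203:
        cube.standard_name = None
        cube.long_name = 'customised'

# cube = iris.load_cube(data_path, callback=customise)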
Example #8
def main():
    # Load the u and v components of wind from a pp file
    infile = iris.sample_data_path('wind_speed_lake_victoria.pp')

    uwind = iris.load_cube(infile, 'x_wind')
    vwind = iris.load_cube(infile, 'y_wind')

    ulon = uwind.coord('longitude')
    vlon = vwind.coord('longitude')

    # The longitude points go from 180 to 540, so subtract 360 from them
    ulon.points = ulon.points - 360.0
    vlon.points = vlon.points - 360.0

    # Create a cube containing the wind speed
    windspeed = (uwind ** 2 + vwind ** 2) ** 0.5
    windspeed.rename('windspeed')

    x = ulon.points
    y = uwind.coord('latitude').points
    u = uwind.data
    v = vwind.data

    # Set up axes to show the lake
    lakes = cfeat.NaturalEarthFeature('physical', 'lakes', '50m',
                                      facecolor='none')

    plt.figure()
    ax = plt.axes(projection=ccrs.PlateCarree())
    ax.add_feature(lakes)

    # Get the coordinate reference system used by the data
    transform = ulon.coord_system.as_cartopy_projection()

    # Plot the wind speed as a contour plot
    qplt.contourf(windspeed, 20)

    # Add arrows to show the wind vectors
    plt.quiver(x, y, u, v, pivot='middle', transform=transform)

    plt.title("Wind speed over Lake Victoria")
    qplt.show()

    # Normalise the data for uniform arrow size
    u_norm = u / np.sqrt(u ** 2.0 + v ** 2.0)
    v_norm = v / np.sqrt(u ** 2.0 + v ** 2.0)

    plt.figure()
    ax = plt.axes(projection=ccrs.PlateCarree())
    ax.add_feature(lakes)

    qplt.contourf(windspeed, 20)

    plt.quiver(x, y, u_norm, v_norm, pivot='middle', transform=transform)

    plt.title("Wind speed over Lake Victoria")
    qplt.show()
Example #9
 def test_no_time_coord_in_cubes(self):
     path0 = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
     path1 = tests.get_data_path(('PP', 'aPPglob1', 'global_t_forecast.pp'))
     cube0 = iris.load_cube(path0)
     cube1 = iris.load_cube(path1)
     cubes = iris.cube.CubeList([cube0, cube1])
     result = copy.copy(cubes)
     unify_time_units(result)
     self.assertEqual(cubes, result)
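When the cubes do carry time coordinates, unify_time_units rewrites them in place so all cubes share one epoch. A minimal synthetic sketch (the cube construction here is illustrative, not from the test suite):

import numpy as np
import iris.coords
import iris.cube
from iris.util import unify_time_units

def tiny_cube(epoch):
    cube = iris.cube.Cube(np.zeros(3), long_name='x')
    time = iris.coords.DimCoord([0.0, 1.0, 2.0], standard_name='time',
                                units='hours since ' + epoch)
    cube.add_dim_coord(time, 0)
    return cube

cubes = [tiny_cube('1970-01-01'), tiny_cube('2000-01-01')]
unify_time_units(cubes)  # both time coords now use one common 'hours since' unit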
Example #10
 def test_save_load_loop(self):
     # Tests an issue where the variable names in the formula
     # terms changed to the standard_names instead of the variable names
     # when loading a previously saved cube.
     with self.temp_filename(suffix=".nc") as filename, self.temp_filename(suffix=".nc") as other_filename:
         iris.save(self.cube, filename)
         cube = iris.load_cube(filename)
         iris.save(cube, other_filename)
         other_cube = iris.load_cube(other_filename)
         self.assertEqual(cube, other_cube)
Example #11
    def test_process_flags(self):
        # Test that process flags are created for correct values of lbproc
        orig_file = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))

        # Values that result in process flags attribute NOT being created
        omit_process_flags_values = (64, 128, 4096, 8192)

        # Test single flag values
        for value, _ in iris.fileformats.pp.LBPROC_PAIRS:
            f = next(iris.fileformats.pp.load(orig_file))
            f.lbproc = value # set value

            # Write out pp file
            temp_filename = iris.util.create_temp_filename(".pp")
            with open(temp_filename, 'wb') as temp_fh:
                f.save(temp_fh)

            # Load pp file
            cube = iris.load_cube(temp_filename)

            if value in omit_process_flags_values:
                # Check ukmo__process_flags attribute not created
                self.assertEqual(cube.attributes.get("ukmo__process_flags", None), None)
            else:
                # Check ukmo__process_flags attribute contains correct values
                self.assertIn(iris.fileformats.pp.lbproc_map[value], cube.attributes["ukmo__process_flags"])

            os.remove(temp_filename)

        # Test multiple flag values
        multiple_bit_values = ((128, 32), (4096, 1024), (8192, 1024))

        # Maps lbproc value to the process flags that should be created
        multiple_map = {sum(x) : [iris.fileformats.pp.lbproc_map[y] for y in x] for x in multiple_bit_values}

        for bit_values in multiple_bit_values:
            f = next(iris.fileformats.pp.load(orig_file))
            f.lbproc = sum(bit_values) # set value

            # Write out pp file
            temp_filename = iris.util.create_temp_filename(".pp")
            with open(temp_filename, 'wb') as temp_fh:
                f.save(temp_fh)

            # Load pp file
            cube = iris.load_cube(temp_filename)

            # Check the process flags created
            self.assertEqual(set(cube.attributes['ukmo__process_flags']),
                             set(multiple_map[sum(bit_values)]),
                             'Mismatch between expected and actual process '
                             'flags.')

            os.remove(temp_filename)
Example #12
def preprocessing(params):
    """
    Preprocessing.
    """
    global global_topography, station_vgrid
    
    global_topography = iris.load_cube(
                            params['global_topography_datafile']
                        )
    station_vgrid = iris.load_cube(
                        params['station_vertical_grid_file']
                    )
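A hypothetical parameter dict for the routine above, using the keys the code reads (file names are placeholders):

params = {
    'global_topography_datafile': 'global_topography.nc',
    'station_vertical_grid_file': 'station_vgrid.nc',
}
preprocessing(params)  # populates the two module-level cubes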
Example #13
    def test_trajectory(self):
        cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))

        # extract a trajectory
        xpoint = cube.coord('longitude').points[:10]
        ypoint = cube.coord('latitude').points[:10]
        sample_points = [('latitude', xpoint), ('longitude', ypoint)]
        traj = iris.analysis.trajectory.interpolate(cube, sample_points)

        # save, reload and check
        with self.temp_filename(suffix='.nc') as temp_filename:
            iris.save(traj, temp_filename)
            reloaded = iris.load_cube(temp_filename)
            self.assertCML(reloaded, ('netcdf', 'save_load_traj.cml'))
Example #14
 def test_make_same_grid_source_larger(self):
     
     filename1 = '/home/michael/Scitools/iris-sample-data/sample_data/air_temp.pp'
     source_cube = iris.load_cube(filename1)
     
     filename2 = '/home/michael/Scitools/iris-sample-data/sample_data/pre-industrial.pp'
     grid_cube = iris.load_cube(filename2)
     
     regridded = lib.make_same_grid(source_cube, grid_cube)
     
     grid_shape = regridded.data.shape
     expected_shape = grid_cube.data.shape
     
     self.assertEqual(grid_shape, expected_shape)
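lib.make_same_grid is project-specific; the same round trip can be sketched with iris's public regridding API (the Linear scheme is an assumption about what make_same_grid does internally):

import iris
import iris.analysis

source_cube = iris.load_cube(iris.sample_data_path('air_temp.pp'))
grid_cube = iris.load_cube(iris.sample_data_path('pre-industrial.pp'))
regridded = source_cube.regrid(grid_cube, iris.analysis.Linear())
assert regridded.shape == grid_cube.shape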
Example #15
    def test_regrid_low_dimensional(self):
        theta = load_cube(
            self.theta_p_alt_path,
            (self.theta_constraint
             & self.level_constraint
             & self.forecast_constraint)
        )
        airpress = load_cube(
            self.theta_p_alt_path,
            (self.airpress_constraint
             & self.level_constraint
             & self.forecast_constraint)
        )
        TestRegrid.patch_data(theta)
        TestRegrid.patch_data(airpress)

        # 0-dimensional
        theta_0 = theta[0, 0]
        airpress_0 = airpress[0, 0]
        theta0_regridded = theta_0.regridded(airpress_0, mode='nearest')
        airpress0_regridded = airpress_0.regridded(theta_0, mode='nearest')
        self.assertEqual(theta_0, theta0_regridded)
        self.assertEqual(airpress_0, airpress0_regridded)
        self.assertCMLApproxData(
            theta0_regridded,
            ('regrid', 'theta_on_airpress_0d.cml'))
        self.assertCMLApproxData(
            airpress0_regridded,
            ('regrid', 'airpress_on_theta_0d.cml'))

        # 1-dimensional
        theta_1 = theta[0, 1:4]
        airpress_1 = airpress[0, 0:4]
        self.assertCMLApproxData(
            theta_1.regridded(airpress_1, mode='nearest'),
            ('regrid', 'theta_on_airpress_1d.cml'))
        self.assertCMLApproxData(
            airpress_1.regridded(theta_1, mode='nearest'),
            ('regrid', 'airpress_on_theta_1d.cml'))

        # 2-dimensional
        theta_2 = theta[1:3, 1:4]
        airpress_2 = airpress[0:4, 0:4]
        self.assertCMLApproxData(
            theta_2.regridded(airpress_2, mode='nearest'),
            ('regrid', 'theta_on_airpress_2d.cml'))
        self.assertCMLApproxData(
            airpress_2.regridded(theta_2, mode='nearest'),
            ('regrid', 'airpress_on_theta_2d.cml'))
Example #16
def main():
    # Load data into three cubes, one for each dataset
    e1 = iris.load_cube(iris.sample_data_path('E1_north_america.nc'))
    
    a1b = iris.load_cube(iris.sample_data_path('A1B_north_america.nc'))
    
    # load in the global pre-industrial mean temperature, and limit the domain
    # to the same North American region covered by e1 and a1b.
    north_america = iris.Constraint(
                                    longitude=lambda v: 225 <= v <= 315,
                                    latitude=lambda v: 15 <= v <= 60,
                                    )
    pre_industrial = iris.load_cube(iris.sample_data_path('pre-industrial.pp'),
                                    north_america)
    
    pre_industrial_mean = pre_industrial.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    e1_mean = e1.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    a1b_mean = a1b.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
    
    # Show ticks 30 years apart
    plt.gca().xaxis.set_major_locator(mdates.YearLocator(30))
    
    # Label the ticks with year data
    plt.gca().format_xdata = mdates.DateFormatter('%Y')
    
    # Plot the datasets
    qplt.plot(e1_mean, label='E1 scenario', lw=1.5, color='blue')
    qplt.plot(a1b_mean, label='A1B-Image scenario', lw=1.5, color='red')

    # Draw a horizontal line showing the pre industrial mean
    plt.axhline(y=pre_industrial_mean.data, color='gray', linestyle='dashed', label='pre-industrial', lw=1.5)
    
    # Establish where a1b and e1 have the same data, i.e. the observations
    common = np.where(a1b_mean.data == e1_mean.data)[0]
    observed = a1b_mean[common]
    
    # Plot the observed data
    qplt.plot(observed, label='observed', color='black', lw=1.5)

    # Add a legend and title
    plt.legend(loc="upper left")
    plt.title('North American mean air temperature', fontsize=18)
    
    plt.xlabel('Time / year')
    
    plt.grid()

    iplt.show()
Example #17
    def test_process_flags(self):
        # Test single process flags
        for _, process_desc in iris.fileformats.pp.LBPROC_PAIRS[1:]:
            # Get basic cube and set process flag manually
            ll_cube = stock.lat_lon_cube()
            ll_cube.attributes["ukmo__process_flags"] = (process_desc,)

            # Save cube to netCDF
            temp_filename = iris.util.create_temp_filename(".nc")
            iris.save(ll_cube, temp_filename)

            # Reload cube
            cube = iris.load_cube(temp_filename)

            # Check correct number and type of flags
            self.assertTrue(len(cube.attributes["ukmo__process_flags"]) == 1,
                            "Mismatch in number of process flags.")
            process_flag = cube.attributes["ukmo__process_flags"][0]
            self.assertEqual(process_flag, process_desc)

            os.remove(temp_filename)

        # Test multiple process flags
        multiple_bit_values = ((128, 64), (4096, 1024), (8192, 1024))

        # Maps lbproc value to the process flags that should be created
        multiple_map = {bits: [iris.fileformats.pp.lbproc_map[bit] for
                               bit in bits] for bits in multiple_bit_values}

        for bits, descriptions in multiple_map.items():

            ll_cube = stock.lat_lon_cube()
            ll_cube.attributes["ukmo__process_flags"] = descriptions

            # Save cube to netCDF
            temp_filename = iris.util.create_temp_filename(".nc")
            iris.save(ll_cube, temp_filename)

            # Reload cube
            cube = iris.load_cube(temp_filename)

            # Check correct number and type of flags
            process_flags = cube.attributes["ukmo__process_flags"]
            self.assertTrue(len(process_flags) == len(bits), 'Mismatch in '
                            'number of process flags.')
            self.assertEqual(set(process_flags), set(descriptions))

            os.remove(temp_filename)
Example #18
    def __init__(self, file_path = "", var_name = "",
                 bathymetry_path = "/skynet3_rech1/huziy/NEMO_OFFICIAL/dev_v3_4_STABLE_2012/NEMOGCM/CONFIG/GLK_LIM3_Michigan/EXP00/bathy_meter.nc"):
        """
        :param file_path:
        :param var_name:
        :param bathymetry_path: used to mask land points
        """
        self.current_time_frame = -1
        self.var_name = var_name

        self.cube = iris.load_cube(file_path, constraint=iris.Constraint(cube_func=lambda c: c.var_name == var_name))
        self.lons, self.lats = cartography.get_xy_grids(self.cube)

        lons2d_gl, lats2d_gl = nemo_commons.get_2d_lons_lats_from_nemo(path=bathymetry_path)
        mask_gl = nemo_commons.get_mask(path=bathymetry_path)

        xs, ys, zs = lat_lon.lon_lat_to_cartesian(lons2d_gl.flatten(), lats2d_gl.flatten())
        xt, yt, zt = lat_lon.lon_lat_to_cartesian(self.lons.flatten(), self.lats.flatten())

        tree = cKDTree(list(zip(xs, ys, zs)))
        dists, indices = tree.query(list(zip(xt, yt, zt)))

        self.mask = mask_gl.flatten()[indices].reshape(self.lons.shape)


        self.nt = self.cube.shape[0]
        assert isinstance(self.cube, Cube)
        print(self.nt)
Example #19
 def test_load_stereographic_grid(self):
     # Test loading a single CF-netCDF file with a stereographic
     # grid_mapping.
     cube = iris.load_cube(
         tests.get_data_path(('NetCDF', 'stereographic',
                              'toa_brightness_temperature.nc')))
     self.assertCML(cube, ('netcdf', 'netcdf_stereo.cml'))
Example #20
 def test_uint32_data_netcdf3(self):
     self.cube.data = self.cube.data.astype(np.uint32)
     with self.temp_filename(suffix='.nc') as filename:
         iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
         reloaded = iris.load_cube(filename)
         self.assertCML(reloaded, ('netcdf',
                                   'uint32_data_netcdf3.cml'))
Example #21
    def test_deferred_loading(self):
        # Test exercising CF-netCDF deferred loading and deferred slicing.
        # shape (31, 161, 320)
        cube = iris.load_cube(tests.get_data_path(
            ('NetCDF', 'global', 'xyt', 'SMALL_total_column_co2.nc')))

        # Consecutive index on same dimension.
        self.assertCML(cube[0], ('netcdf', 'netcdf_deferred_index_0.cml'))
        self.assertCML(cube[0][0], ('netcdf', 'netcdf_deferred_index_1.cml'))
        self.assertCML(cube[0][0][0], ('netcdf',
                                       'netcdf_deferred_index_2.cml'))

        # Consecutive slice on same dimension.
        self.assertCML(cube[0:20], ('netcdf', 'netcdf_deferred_slice_0.cml'))
        self.assertCML(cube[0:20][0:10], ('netcdf',
                                          'netcdf_deferred_slice_1.cml'))
        self.assertCML(cube[0:20][0:10][0:5], ('netcdf',
                                               'netcdf_deferred_slice_2.cml'))

        # Consecutive tuple index on same dimension.
        self.assertCML(cube[(0, 8, 4, 2, 14, 12), ],
                       ('netcdf', 'netcdf_deferred_tuple_0.cml'))
        self.assertCML(cube[(0, 8, 4, 2, 14, 12), ][(0, 2, 4, 1), ],
                       ('netcdf', 'netcdf_deferred_tuple_1.cml'))
        subcube = cube[(0, 8, 4, 2, 14, 12), ][(0, 2, 4, 1), ][(1, 3), ]
        self.assertCML(subcube, ('netcdf', 'netcdf_deferred_tuple_2.cml'))

        # Consecutive mixture on same dimension.
        self.assertCML(cube[0:20:2][(9, 5, 8, 0), ][3],
                       ('netcdf', 'netcdf_deferred_mix_0.cml'))
        self.assertCML(cube[(2, 7, 3, 4, 5, 0, 9, 10), ][2:6][3],
                       ('netcdf', 'netcdf_deferred_mix_0.cml'))
        self.assertCML(cube[0][(0, 2), (1, 3)],
                       ('netcdf', 'netcdf_deferred_mix_1.cml'))
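A quick way to confirm that such indexing stays deferred is to inspect the cube's lazy data; has_lazy_data is the modern iris API, so this is a sketch rather than part of the original test:

sub = cube[0:20][0:10]
assert cube.has_lazy_data()  # the source cube is still deferred
assert sub.has_lazy_data()   # slicing preserves laziness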
Example #22
 def test_load_tmerc_grid_and_clim_bounds(self):
     # Test loading a single CF-netCDF file with a transverse Mercator
     # grid_mapping and a time variable with climatology.
     cube = iris.load_cube(
         tests.get_data_path(('NetCDF', 'transverse_mercator',
                              'tmean_1910_1910.nc')))
     self.assertCML(cube, ('netcdf', 'netcdf_tmerc_and_climatology.cml'))
Example #23
 def test_load_lcc_grid(self):
     # Test loading a single CF-netCDF file with Lambert conformal conic
     # grid mapping.
     cube = iris.load_cube(
         tests.get_data_path(('NetCDF', 'lambert_conformal',
                              'test_lcc.nc')))
     self.assertCML(cube, ('netcdf', 'netcdf_lcc.cml'))
Example #24
 def test_load_rotated_xy_land(self):
     # Test loading single xy rotated pole CF-netCDF file.
     cube = iris.load_cube(tests.get_data_path(
         ('NetCDF', 'rotated', 'xy', 'rotPole_landAreaFraction.nc')))
     # Make sure the AuxCoords have lazy data.
     self.assertTrue(is_lazy_data(cube.coord('latitude').core_points()))
     self.assertCML(cube, ('netcdf', 'netcdf_rotated_xy_land.cml'))
Example #25
 def test_load_rotated_xyt_precipitation(self):
     # Test loading single xyt rotated pole CF-netCDF file.
     cube = iris.load_cube(
         tests.get_data_path(('NetCDF', 'rotated', 'xyt',
                              'small_rotPole_precipitation.nc')))
     self.assertCML(cube, ('netcdf',
                           'netcdf_rotated_xyt_precipitation.cml'))
Example #26
    def test_unmasked(self):
        tif_header = 'SMALL_total_column_co2.nc.tif_header.txt'
        fin = tests.get_data_path(('NetCDF', 'global', 'xyt',
                                   'SMALL_total_column_co2.nc'))
        cube = iris.load_cube(fin)[0]
        # PIL doesn't support float64
        cube.data = cube.data.astype('f4')

        # Ensure longitude values are continuous and monotonically increasing,
        # and discard the 'half cells' at the top and bottom of the UM output
        # by extracting a subset.
        east = iris.Constraint(longitude=lambda cell: cell < 180)
        non_edge = iris.Constraint(latitude=lambda cell: -90 < cell < 90)
        cube = cube.extract(east & non_edge)
        cube.coord('longitude').guess_bounds()
        cube.coord('latitude').guess_bounds()
        self.check_tiff(cube, tif_header)

        # Check again with the latitude coordinate (and the corresponding
        # cube.data) inverted. The output should be the same as before.
        coord = cube.coord('latitude')
        coord.points = coord.points[::-1]
        coord.bounds = None
        coord.guess_bounds()
        cube.data = cube.data[::-1, :]
        self.check_tiff(cube, tif_header)
Example #27
 def test_plot_tmerc(self):
     filename = tests.get_data_path(('NetCDF', 'transverse_mercator',
                                     'tmean_1910_1910.nc'))
     self.cube = iris.load_cube(filename)
     iplt.pcolormesh(self.cube[0])
     plt.gca().coastlines()
     self.check_graphic()
Example #28
 def setUp(self):
     path = tests.get_data_path(('PP', 'simple_pp', 'global.pp'))
     self.cube_2d = iris.load_cube(path)
     # Generate the unicode cube here now that it's used in two tests.
     unicode_str = chr(40960) + 'abcd' + chr(1972)
     self.unicode_cube = iris.tests.stock.simple_1d()
     self.unicode_cube.attributes['source'] = unicode_str
Example #29
    def test_perturbation(self):
        path = tests.get_data_path(('NetCDF', 'global', 'xyt',
                                    'SMALL_hires_wind_u_for_ipcc4.nc'))
        cube = load_cube(path)
        # trim to 1 time and regular lats
        cube = cube[0, 12:144, :]
        crs = iris.coord_systems.GeogCS(6371229)
        cube.coord('latitude').coord_system = crs
        cube.coord('longitude').coord_system = crs
        # add a realization coordinate
        cube.add_aux_coord(iris.coords.DimCoord(points=1,
                                                standard_name='realization',
                                                units='1'))
        with self.temp_filename('testPDT11.GRIB2') as temp_file_path:
            iris.save(cube, temp_file_path)
            # Get a grib_dump of the output file.
            dump_text = check_output(('grib_dump -O -wcount=1 ' +
                                      temp_file_path),
                                     shell=True).decode()

            # Check that various aspects of the saved file are as expected.
            expect_strings = (
                'editionNumber = 2',
                'gridDefinitionTemplateNumber = 0',
                'productDefinitionTemplateNumber = 11',
                'perturbationNumber = 1',
                'typeOfStatisticalProcessing = 0',
                'numberOfForecastsInEnsemble = 255')
            for expect in expect_strings:
                self.assertIn(expect, dump_text)
Example #30
def main():
    fname = iris.sample_data_path('ostia_monthly.nc')
    
    # load a single cube of surface temperature between +/- 5 latitude
    cube = iris.load_cube(fname, iris.Constraint('surface_temperature', latitude=lambda v: -5 < v < 5))
    
    # Take the mean over latitude
    cube = cube.collapsed('latitude', iris.analysis.MEAN)
    
    # Now that we have our data in a nice way, let's create the plot
    # contour with 20 levels
    qplt.contourf(cube, 20)
    
    # Put a custom label on the y axis 
    plt.ylabel('Time / years')
    
    # Stop matplotlib providing clever axes range padding
    plt.axis('tight')
    
    # As we are plotting annual variability, put years as the y ticks
    plt.gca().yaxis.set_major_locator(mdates.YearLocator())
    
    # And format the ticks to just show the year
    plt.gca().yaxis.set_major_formatter(mdates.DateFormatter('%Y'))
    
    plt.show()
Example #31
"""
import cartopy.crs as ccrs
import cartopy.feature as cfeature
import iris
import iris.plot as iplt
import matplotlib.pyplot as plt
import numpy as np

from eofs.iris import Eof
from eofs.examples import example_data_path

# Read SST anomalies using the iris module. The file contains November-March
# averages of SST anomaly in the central and northern Pacific.
filename = example_data_path('sst_ndjfm_anom.nc')
sst = iris.load_cube(filename)

# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
solver = Eof(sst, weights='coslat')

# Retrieve the leading EOF, expressed as the correlation between the leading
# PC time series and the input SST anomalies at each grid point, and the
# leading PC time series itself.
eof1 = solver.eofsAsCorrelation(neofs=1)
pc1 = solver.pcs(npcs=1, pcscaling=1)

# Plot the leading EOF expressed as correlation in the Pacific domain.
clevs = np.linspace(-1, 1, 11)
ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=190))
fill = iplt.contourf(eof1[0], clevs, cmap=plt.cm.RdBu_r)
Example #32
import matplotlib.pyplot as plt

import iris
import iris.analysis
import iris.plot as iplt

global_air_temp = iris.load_cube(iris.sample_data_path("air_temp.pp"))
rotated_psl = iris.load_cube(iris.sample_data_path("rotated_pole.nc"))

scheme = iris.analysis.Linear(extrapolation_mode="mask")
global_psl = rotated_psl.regrid(global_air_temp, scheme)

plt.figure(figsize=(4, 3))
iplt.pcolormesh(global_psl)
plt.title("Air pressure\n" "on a global longitude latitude grid")
ax = plt.gca()
ax.coastlines()
ax.gridlines()
ax.set_extent([-90, 70, 10, 80])

plt.show()
Example #33
def filepath_regrid(input_filepath,
                    target_filepath,
                    scheme=iris.analysis.AreaWeighted(mdtol=0.5)):
    input_cube = iris.load_cube(str(input_filepath))
    target_cube = iris.load_cube(str(target_filepath))
    return regrid(input_cube, target_cube, scheme)
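Hypothetical usage of the helper above (file names are placeholders; regrid is the module's own wrapper around the chosen scheme):

result = filepath_regrid('model_output.nc', 'target_grid.nc')
iris.save(result, 'regridded.nc')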
Example #34
 def setUp(self):
     TestMixin.setUp(self)
     self.cube = iris.load_cube(self.theta_path)
Example #35
    add_categorised_coord(cube, name, coord,
                          lambda coord, x: coord.units.num2date(x).hour)


dtmindt = datetime.datetime(2011, 8, 19, 0, 0, 0)
dtmaxdt = datetime.datetime(2011, 9, 7, 23, 0, 0)
dtmin = unit.date2num(dtmindt, 'hours since 1970-01-01 00:00:00',
                      unit.CALENDAR_STANDARD)
dtmax = unit.date2num(dtmaxdt, 'hours since 1970-01-01 00:00:00',
                      unit.CALENDAR_STANDARD)
time_constraint = iris.Constraint(time=lambda t: dtmin <= t.point <= dtmax)

fr = '%s%s/%s/%s.pp' % (pp_file_path, regrid_model_min1, regrid_model, diag)
fg = '%sdjzn/djznw/%s.pp' % (pp_file_path, diag)
try:
    glob_load = iris.load_cube(fg, ('%s' % cube_names[0]) & time_constraint)
except iris.exceptions.ConstraintMismatchError:
    glob_load = iris.load_cube(fg, ('%s' % cube_names2[0]) & time_constraint)

## Get time points from global LAM to use as time constraint when loading other runs
time_list = glob_load.coord('time').points
# Some models have radiation diagnostics that are 10s offset from others so checking int values of time
glob_tc = iris.Constraint(time=lambda t: int(t.point) in time_list.astype(int))
#glob_tc = iris.Constraint(time=time_list)

del glob_load

for experiment_id in experiment_ids:

    expmin1 = experiment_id[:-1]
Example #36
def MaxHeight(workdir):

    times = ['201309160100', '201309160900']

    for time in times:
        filenames = glob.glob(workdir + '*' + time + '.txt')
        print(filenames)
        filename = filenames[0]

        # Identify cube attributes
        cubes = iris.load(filename)
        print(cubes[2].attributes)

        names = [
            'Level1', 'Level2', 'Level3', 'Level4', 'Level5', 'Level6',
            'Level7', 'Level8', 'Level9', 'Level10'
        ]

        colorscale = ('#b4dcff', '#04fdff', '#00ff00', '#fdff00', '#ffbd02',
                      '#ff6a00', '#fe0000', '#0000A0', '#800080', '#006400')
        # Map each level name to its colour so the plotting loop can look it up
        level_colours = dict(zip(names, colorscale))

        # Using contourf to provide my colorbar info, then clearing the figure
        Z = [[0, 0], [0, 0]]
        levels = (0, 200, 400, 600, 800, 1000, 1200, 1400, 1600, 1800, 2000)
        CS3 = plt.contourf(Z, levels, colors=colorscale)
        plt.clf()

        # Set up axes
        ax = plt.axes(projection=ccrs.PlateCarree())
        ax.set_extent([-24, -14, 60, 67])

        for name in names:
            print(name)
            attConstraint = iris.AttributeConstraint(Name=name)
            cube = iris.load_cube(filename, attConstraint)

            # Mask data less than threshold which represents 1 real particle in /m3
            conc = cube
            conc.data = np.ma.masked_less(conc.data, 1e-12)

            # Identify time
            phenom_time = conc.coord('time')
            phenom_time_date1 = phenom_time.units.num2date(
                phenom_time.bounds[0][0]).strftime(UTC_format)
            phenom_time_date2 = phenom_time.units.num2date(
                phenom_time.bounds[0][1]).strftime(UTC_format)

            # Plot data, colouring this level with its entry in the palette
            cf1 = iplt.contourf(conc, colors=level_colours[name])

        # Add country outlines
        countries = cfeature.NaturalEarthFeature(category='cultural',
                                                 name='admin_0_countries',
                                                 scale='10m',
                                                 facecolor='none')
        ax.add_feature(countries, edgecolor='black', zorder=2)

        # Set-up the gridlines
        gl = ax.gridlines(draw_labels=True, linewidth=0.8, alpha=0.9)
        gl.xlabels_top = False
        gl.ylabels_right = False
        gl.xlocator = mticker.FixedLocator(
            [-24, -23, -22, -21, -20, -19, -18, -17, -16, -15,
             -14])  # lat long lines
        gl.ylocator = mticker.FixedLocator([60, 62, 64, 66,
                                            68])  #lat and long lines
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER

        #Set-up the Colour Bar
        cb = plt.colorbar(CS3)
        cbartext = 'Height m asl'
        cb.set_label(cbartext)
        cb.ax.set_xticklabels(
            [200, 400, 600, 800, 1000, 1200, 1400, 1600, 1800, 2000],
            rotation='vertical')
        plt.title(phenom_time_date2, fontsize=12)

        plt.show()
Example #37
#url = 'http://comt.sura.org/thredds/dodsC/data/comt_1_archive/inundation_tropical/USF_FVCOM/Hurricane_Rita_2D_final_run_without_waves'

#SELFE
#url = 'http://comt.sura.org/thredds/dodsC/data/comt_1_archive/inundation_tropical/VIMS_SELFE/Hurricane_Rita_2D_final_run_without_waves'

# set parameters
bbox = [-95, -85, 27,
        32]  # set the bounding box [lon_min, lon_max, lat_min, lat_max]
var = 'sea_surface_height_above_geoid'  # standard_name (or long_name, if no standard_name)
levs = np.arange(-1, 5.0, .2)  # set the contour levels
start = dt.datetime(2005, 9, 24, 5, 0, 0)  # time in UTC
#start = dt.datetime.utcnow() + dt.timedelta(hours=6)

cube = iris.load_cube(url, var)

print(cube)

ug = pyugrid.UGrid.from_ncfile(url)

# What's in there?
#print("There are %i nodes"%ug.nodes.shape[0])
#print("There are %i edges"%ug.edges.shape[0])
#print("There are %i faces"%ug.faces.shape[0])
Example #38
def composite_pcp_tc_year_month(year, month, lead):
    """Computes a composite of the precipitation due to all TCs at a particular
	forecast lead time (in days) for a particular year and month.

	Total is divided by 2 at the end as each day is composited from both the
	00Z and the 12Z forecast.  (Of course there may be no tracks at certain
	times, in which case that time just contributes 0 to the total.)

	**Arguments**

	*year*, *month*
		`int`s, year and month of validity times for which to calculate the
		composite

	*lead*
		`int`, length of time after forecast initialization
	"""

    # Check lead time is in available range
    if not 0 <= lead <= 6:
        raise ValueError('lead=%s; must be 0 <= lead <= 6' % lead)
    #if year == 2017:
    #if not 1 <= month <= 7:
    #raise ValueError('Data in 2017 used up to July only')

    # Check whether output file already exists

    #infilename = TRACK_FILE %
    infile = os.path.join(TRACK_DIR_3WAY, '%Y', TRACK_FILE)
    print(infile)

    outdir = os.path.join(COMP_PCP_TC_DIR, str(lead), str(year))
    comp_file = COMP_PCP_TC_FILE % (lead, year, month)
    outfile = os.path.join(outdir, comp_file)

    print(outfile)

    if os.path.isfile(outfile):
        raise ValueError('Output file %s already exists' % outfile)
    if not os.path.isdir(outdir):
        os.makedirs(outdir)

    # Iterate for every time available in this year and month
    t1 = datetime.datetime(year, month, 1, 0)
    dt = datetime.timedelta(days=1)
    if (year, month) == (2017, 7):
        t2 = datetime.datetime(2017, 7, 11)
    elif month == 12:
        t2 = datetime.datetime(year + 1, 1, 1) - dt
    else:
        t2 = datetime.datetime(year, month + 1, 1) - dt
    pcp_cubes = iris.cube.CubeList()
    exclude = pl.exclude_days(lead)
    count_days = 0
    if (year, month) == (2010, 3):
        count_days_res = {'n320': 0.0, 'n512': 0.0}
        start_n512 = datetime.datetime(2010, 3, 9, 12)
    elif (year, month) == (2014, 7):
        count_days_res = {'n512': 0.0, 'n768': 0.0}
        start_n768 = datetime.datetime(2014, 7, 15, 12)
    vt_list = []
    while t1 <= t2:

        # Check whether this day is on the list of those to exclude
        if t1 in exclude:
            print(t1, '- EXCLUDE')
            t1 += dt
            continue
        if t1.timetuple()[:3] == (2017, 7, 11):
            count_days += 0.5
        else:
            count_days += 1
        print(t1.strftime('%Y/%m/%d'))

        # Get list of forecast and validity times for the three forecasts to be
        # used
        ftime_deltas = np.arange(-12, 13, 12) - lead * 24
        ftimes = (t1 + datetime.timedelta(hours=hh) for hh in ftime_deltas)
        vtimes = (np.array([15, 21]) + lead * 24,
                  np.arange(3, 22, 6) + lead * 24,
                  np.array([3, 9]) + lead * 24)

        # Iterate for each of the three forecasts
        for ff, vv in zip(ftimes, vtimes):

            # If on or after 2017/07/11 12:00, skip
            #if ff >= datetime.datetime(2017, 7, 11, 12):
            #continue

            # Get year, month, day, hour, lon, lat from file
            this_infile = ff.strftime(infile)
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')
                ain = np.genfromtxt(this_infile,
                                    dtype=float,
                                    skip_header=1,
                                    usecols=range(3, 9))

            # Count days for each resolution
            for v in vv:
                vt = ff + datetime.timedelta(hours=v)
                if vt not in vt_list:
                    vt_list.append(vt)
                    if (year, month) == (2010, 3):
                        if vt < start_n512:
                            count_days_res['n320'] += 0.25
                        else:
                            count_days_res['n512'] += 0.25
                    elif (year, month) == (2014, 7):
                        if vt < start_n768:
                            count_days_res['n512'] += 0.25
                        else:
                            count_days_res['n768'] += 0.25

            # If no tracks in this forecast, skip it
            if not ain.size:
                print('   ', ff, '- no tracks')
                continue

            # Iterate for every validity time required from this forecast
            for v in vv:

                # Get track(s) with point(s) this time
                gd = ff + datetime.timedelta(hours=v)
                aint = ain[np.where((ain[:, 0] == gd.year) &\
                     (ain[:, 1] == gd.month) &\
                     (ain[:, 2] == gd.day) &\
                     (ain[:, 3] == gd.hour))]
                if not aint.size:
                    print('   ', ff, 'T+%03d' % v, '- no tracks')
                    continue
                print('   ', ff, 'T+%03d' % v)

                # Iterate for each track
                for lon, lat in aint[:, [4, 5]]:
                    this_pcp = cf.nwp_pcp_accumulation(ff, v, lon, lat)
                    this_pcp.coord(axis='X').var_name = 'longitude'
                    this_pcp.coord(axis='Y').var_name = 'latitude'
                    this_pcp.coord(axis='X').attributes = {}
                    this_pcp.coord(axis='Y').attributes = {}
                    pcp_cubes.append(iris.util.squeeze(this_pcp))

        # Increment time
        t1 += dt

    # If no Cubes, create a dummy one with zeros
    def dummy_cube():
        dummy = None
        dummy_t = datetime.datetime(year, month, 1)
        while dummy is None:
            dummy = this_pcp = cf.nwp_pcp_accumulation(dummy_t, 3)
            dummy_t += dt
        dummy = iris.util.squeeze(dummy)
        dummy.data = np.zeros_like(dummy.data)
        dummy.remove_coord(dummy.coord(axis='T'))
        return dummy

    if not len(pcp_cubes):
        pcp_cubes = iris.cube.CubeList([dummy_cube()])

    # Sum over Cubes and divide by 2
    pcp = pl.add_cubes(
        pcp_cubes, deal_with_masks=False, contributing_days=False) / 2.

    # Set metadata
    pcp.units = 'mm'
    pcp.standard_name = 'lwe_thickness_of_precipitation_amount'
    pcp.long_name = 'precipitation'
    pcp.var_name = 'pcp'
    pcp.attributes['contributing_days'] = count_days

    # Save
    iris.save(pcp, outfile)
    print(outfile)

    # For months with more than one resolution, sum separately and divide by 2
    if (year, month) in [(2010, 3), (2014, 7)]:
        if year == 2010:
            res_list = ['n320', 'n512']
        elif year == 2014:
            res_list = ['n512', 'n768']
        #res_list = {2010: ['n320', 'n512'], 2014: ['n512', 'n768']}[year]
        pcp_sep = pl.add_cubes(pcp_cubes,
                               deal_with_masks=False,
                               separate_resolutions=True,
                               contributing_days=False)
        file_tot = os.path.join(COMP_PCP_TOT_DIR, str(lead), str(year),
                                COMP_PCP_TOT_FILE % (lead, year, month))
        for k in pcp_sep:
            pcp_sep_k = pcp_sep[k] / 2.

            # Set metadata
            pcp_sep_k.units = 'mm'
            pcp_sep_k.standard_name = 'lwe_thickness_of_precipitation_amount'
            pcp_sep_k.long_name = 'precipitation'
            pcp_sep_k.var_name = 'pcp'

            # Number of contributing days is difficult to count so just get the
            # value from the total pcp composites (the number should be the
            # same anyway)
            res = {640: 'n320', 1024: 'n512', 1536: 'n768'}[k[1]]
            res_list.remove(res)
            file_tot_k = file_tot.replace('.nc', '%s.nc' % res)
            cube_tot_k = iris.load_cube(file_tot_k)
            pcp_sep_k.attributes['contributing_days'] = \
             float(cube_tot_k.attributes['contributing_days'])

            # Save
            outfile_k = outfile.replace('.nc', '.%s.nc' % res)
            iris.save(pcp_sep_k, outfile_k)
            print(outfile_k)

        # If any resolutions are still in res_list it means there were no
        # tracks at that resolution, so save an empty Cube
        for res in res_list:
            pcp_sep_k = dummy_cube()
            pcp_sep_k.units = 'mm'
            pcp_sep_k.standard_name = 'lwe_thickness_of_precipitation_amount'
            pcp_sep_k.long_name = 'precipitation'
            pcp_sep_k.var_name = 'pcp'
            file_tot_k = file_tot.replace('.nc', '%s.nc' % res)
            cube_tot_k = iris.load_cube(file_tot_k)
            pcp_sep_k.attributes['contributing_days'] = float(
                cube_tot_k.attributes['contributing_days'])
            outfile_k = outfile.replace('.nc', '.%s.nc' % res)
            iris.save(pcp_sep_k, outfile_k)
            print(outfile_k)
Example #39
def main(inargs):
    """Run the program."""

    region = inargs.region.replace('-', '_')

    # Basin data
    hfbasin = inargs.var == 'northward_ocean_heat_transport'
    if not hfbasin:
        assert inargs.basin_file, "Must provide a basin file for hfy data"
        basin_cube = iris.load_cube(inargs.basin_file)
    else:
        basin_cube = None
        inargs.basin_file = None

    # Heat transport data
    data_cube = read_data(inargs.infiles, inargs.var, basin_cube, region)
    orig_standard_name = data_cube.standard_name
    orig_var_name = data_cube.var_name
  
    history_attribute = get_history_attribute(inargs.infiles, data_cube, inargs.basin_file, basin_cube)
    data_cube.attributes['history'] = gio.write_metadata(file_info=history_attribute)

    # Regrid (if needed)
    if inargs.regrid:
        data_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(data_cube)

    dim_coord_names = [coord.name() for coord in data_cube.dim_coords]
    aux_coord_names = [coord.name() for coord in data_cube.aux_coords]
    
    regular_grid = not aux_coord_names

    if hfbasin:
        assert len(dim_coord_names) == 2
        assert dim_coord_names[0] == 'time'
        y_axis_name = dim_coord_names[1]
    else:
        assert len(dim_coord_names) == 3
        assert dim_coord_names[0] == 'time'
        y_axis_name, x_axis_name = dim_coord_names[1:]    
        for aux_coord in aux_coord_names:
            data_cube.remove_coord(aux_coord)

    # Basin array
    if inargs.basin_file and not inargs.regrid:
        ndim = data_cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data, [ndim - 2, ndim - 1], data_cube.shape) 
    elif regular_grid and not hfbasin: 
        basin_array = uconv.create_basin_array(data_cube)

    # Calculate the zonal sum (if required)
    data_cube_copy = data_cube.copy()
 
    if hfbasin:
        zonal_cube = data_cube_copy
    else:
        zonal_cube = data_cube_copy.collapsed(x_axis_name, iris.analysis.SUM)
        zonal_cube.remove_coord(x_axis_name)

    # Attributes
    try:
        zonal_cube.remove_coord('region')
    except iris.exceptions.CoordinateNotFoundError:
        pass

    standard_name = 'northward_ocean_heat_transport'
    var_name = 'hfbasin'

    zonal_cube.standard_name = standard_name
    zonal_cube.long_name = standard_name.replace('_', ' ')
    zonal_cube.var_name = var_name   

    if inargs.cumsum:
        zonal_cube = uconv.flux_to_magnitude(zonal_cube)
        zonal_cube = cumsum(zonal_cube)
        
    iris.save(zonal_cube, inargs.outfile)
Example #40
def main(
        surf_cube,
        flux_cube,
        atm_cube,
        anom=True,
        ols_out='slope',
        surf_scale=None,
        flux_scale=None,
        atm_scale=None,
        wet_dry=False,
        weighting=True,
        pre_data_path=('/nfs/a68/gyjcab/datasets/lapse_data_harmonised/'
                       'Jan_2018/Final/1.0deg/'
                       'pr_trmm_3b43_mon_1.0deg_1998_2016.nc'),
        constraint_yrs=None,
        plotting=False,
        plotting_args={
            'name': 'Observations',
            'lat_lims': [-60, 30],
            'lon_lims': [-120, 180],
            'levels': [(-10, 10, 11), (-15, 15, 11), (-10, 10, 11)]
        },
        p_thresh=0.05,
        corr_method='pearson'):
    """

    Program for calculating two legged metric with surface, flux and
    atmospheric variables.

    Takes Iris cubes as input.

    Arguments:
        surf_cube = Iris cube of surface state variable.
        flux_cube = Iris cube of flux variable.
        atm_cube = Iris cube of atmospheric state variable.
        anom = Boolean. Calculate metrics using anomalies from climatological
               seasonal cycle (True) or interannual monthly data (False).
        ols_out = output from linear regression. Accepts 'slope' or 'r'.
        surf_scale = scale factor for surface variable.
        flux_scale = scale factor for flux variable.
        atm_scale = scale factor for atmospheric variable.
        wet_dry = Boolean. Calculate metric using data from the 6 wettest and
                  6 driest months in each pixel (True) or using data from all
                  months (False).
        weighting = Boolean. Weight output arrays by standard deviation of
                    denominator (requirement of two-legged metric). Option to
                    remove weighting may be preferred when calculating
                    correlation coefficients.
        pre_data_path = If wet_dry is True, path for precipitation data used to
                        identify wet and dry months.
        constraint_yrs = Length 2 array with start and end years of constraint.
        plotting = Boolean. Plot output of metric. If False returns output of
                   metric as arrays only.
        plotting_args = dictionary of plotting arguments, including name of
                        data being plotted (observations or name of model),
                        figure size, limits for output map, and colorbar
                        levels.
        p_thresh = p threshold for calculating significance of correlations.
        corr_method = correlation method. Can be 'pearson' (assumes data are
                      normally distributed) or 'spearman' (no assumption 
                      about the distribution).

    """
    # Apply scaling factors
    if surf_scale is not None:
        surf_cube.data = surf_cube.data * surf_scale
    if flux_scale is not None:
        flux_cube.data = flux_cube.data * flux_scale
    if atm_scale is not None:
        atm_cube.data = atm_cube.data * atm_scale

    # Check if lats are ascending, if not then reverse
    surf_cube = flip_lats(surf_cube)
    flux_cube = flip_lats(flux_cube)
    atm_cube = flip_lats(atm_cube)

    # Reorder data from -180 to +180 degrees
    temp_lon = surf_cube.coord('longitude').points
    if temp_lon.max() > 180:
        surf_cube = minus180_to_plus180(surf_cube)
        flux_cube = minus180_to_plus180(flux_cube)
        atm_cube = minus180_to_plus180(atm_cube)

    # Calculate anomalies versus climatological seasonal cycle
    if anom is True:
        surf_cube = monthly_anom_cube(surf_cube)
        flux_cube = monthly_anom_cube(flux_cube)
        atm_cube = monthly_anom_cube(atm_cube)

    # Extract data from input cubes
    surf_var = surf_cube.data
    lat = surf_cube.coord('latitude').points
    lon = surf_cube.coord('longitude').points
    flux_var = flux_cube.data
    atm_var = atm_cube.data

    # Constrain data to required years
    if constraint_yrs is not None:
        constraint = iris.Constraint(time=lambda cell: constraint_yrs[0] <=
                                     cell.point.year <= constraint_yrs[1])
    else:
        constraint = None

    # Calculate for wet and dry months separately
    if wet_dry is True:

        # For each pixel identify wettest 6 months
        # Read in precipitation data
        try:
            data_path = (pre_data_path)
            pre_cube = iris.load_cube(data_path, constraint=constraint)
        except NameError:
            print('Need to specify filepath for precipitation data to '
                  'calculate wet/dry months')
            assert False

        # Regrid precipitation data to resolution of input array
        target_cube = surf_cube
        scheme = iris.analysis.AreaWeighted(mdtol=0.5)
        pre_cube = pre_cube.regrid(target_cube, scheme)

        # Calculate seasonal cycle for each pixel
        iris.coord_categorisation.add_month(pre_cube, 'time', name='month')
        pre_mn = pre_cube.aggregated_by(['month'], iris.analysis.MEAN)

        # For all pixels get indices of wet months
        nyear = int(surf_var.shape[0] / 12)
        wet_bool = np.zeros(
            (nyear * 12, pre_cube.shape[-2], pre_cube.shape[-1]))
        for ny in range(pre_mn.shape[-2]):
            for nx in range(pre_mn.shape[-1]):
                cycle = pre_mn.data[:, ny, nx]
                if np.nanmax(cycle) > 0:
                    wet_idx = sorted(range(12), key=lambda x: cycle[x])[-6:]
                    for yr in range(nyear):
                        for w in wet_idx:
                            wet_bool[w + 12 * yr, ny, nx] = 1
                else:
                    wet_bool[:, ny, nx] = np.nan

        # Define dictionaries to hold output
        wet_arrays = {'surf_leg': None, 'atm_leg': None, 'product': None}
        dry_arrays = {'surf_leg': None, 'atm_leg': None, 'product': None}
        data_dict = {'wet': wet_arrays, 'dry': dry_arrays}

        # Calculate metric for wet and dry seasons
        for season in ['wet', 'dry']:
            print(season)
            print(np.nanmin(surf_var), np.nanmax(surf_var))
            two_legged_output = calculating_legs(surf_var,
                                                 flux_var,
                                                 atm_var,
                                                 ols_out=ols_out,
                                                 wet_bool=wet_bool,
                                                 season=season,
                                                 weighting=weighting,
                                                 p_thresh=p_thresh,
                                                 corr_method=corr_method)
            surf_leg = two_legged_output[0]
            surf_pvals = two_legged_output[1]
            atm_leg = two_legged_output[2]
            atm_pvals = two_legged_output[3]
            product = two_legged_output[4]
            product_pvals = two_legged_output[5]

            data_dict[season]['surf_leg'] = surf_leg
            data_dict[season]['surf_pvals'] = surf_pvals
            data_dict[season]['atm_leg'] = atm_leg
            data_dict[season]['atm_pvals'] = atm_pvals
            data_dict[season]['product'] = product
            data_dict[season]['product_pvals'] = product_pvals

            # Call plotting routine
            if plotting is True:

                # Define plotting variables
                name = plotting_args['name'] + ': ' + season + ' season'
                surf_name = surf_cube.long_name
                flux_name = flux_cube.long_name
                atm_name = atm_cube.long_name

                if ols_out == 'slope':
                    if surf_scale is None:
                        surf_scale = ''
                    else:
                        surf_scale = str(' ({:.0e}'.format(surf_scale)) + ' '

                    if flux_scale is None:
                        flux_scale = ''
                    else:
                        flux_scale = str(' ({:.0e}'.format(flux_scale)) + ' '

                    if atm_scale is None:
                        atm_scale = ''
                    else:
                        atm_scale = str(' ({:.0e}'.format(atm_scale)) + ' '

                    surf_leg_unit = (flux_scale + str(flux_cube.units) + '/' +
                                     surf_scale + str(surf_cube.units))
                    atm_leg_unit = (atm_scale + str(atm_cube.units) + '/' +
                                    flux_scale + str(flux_cube.units))
                    product_unit = (atm_scale + str(atm_cube.units) + '/' +
                                    surf_scale + str(surf_cube.units))

                elif ols_out == 'r':
                    surf_leg_unit = ' '
                    atm_leg_unit = ' '
                    product_unit = ' '
                lat_lims = plotting_args['lat_lims']
                lon_lims = plotting_args['lon_lims']
                levels = plotting_args['levels']

                plot_two_legged(name,
                                surf_name,
                                surf_leg,
                                surf_leg_unit,
                                flux_name,
                                atm_leg,
                                atm_leg_unit,
                                atm_name,
                                product,
                                product_unit,
                                lat,
                                lon,
                                lat_lims,
                                lon_lims,
                                levs=levels)

        return (data_dict, wet_bool, lat, lon)

    # Calculate metric using data from all months
    else:
        two_legged_output = calculating_legs(surf_var,
                                             flux_var,
                                             atm_var,
                                             ols_out=ols_out,
                                             weighting=weighting,
                                             p_thresh=p_thresh,
                                             corr_method=corr_method)
        surf_leg = two_legged_output[0]
        surf_pvals = two_legged_output[1]
        atm_leg = two_legged_output[2]
        atm_pvals = two_legged_output[3]
        product = two_legged_output[4]
        product_pvals = two_legged_output[5]

        # Call plotting routine
        if plotting is True:

            # Define plotting variables
            name = plotting_args['name']
            surf_name = surf_cube.long_name
            if surf_name is None:
                surf_name = surf_cube.standard_name
            print(surf_name)
            flux_name = flux_cube.long_name
            if flux_name is None:
                flux_name = flux_cube.standard_name
            print(flux_name)
            atm_name = atm_cube.long_name
            if atm_name is None:
                atm_name = atm_cube.standard_name
            print(atm_name)

            if ols_out == 'slope':
                if surf_scale is None:
                    surf_scale = ''
                else:
                    surf_scale = ' ({:.0e}) '.format(surf_scale)

                if flux_scale is None:
                    flux_scale = ''
                else:
                    flux_scale = ' ({:.0e}) '.format(flux_scale)

                if atm_scale is None:
                    atm_scale = ''
                else:
                    atm_scale = ' ({:.0e}) '.format(atm_scale)

                surf_leg_unit = (flux_scale + str(flux_cube.units) + '/' +
                                 surf_scale + str(surf_cube.units))
                atm_leg_unit = (atm_scale + str(atm_cube.units) + '/' +
                                flux_scale + str(flux_cube.units))
                product_unit = (atm_scale + str(atm_cube.units) + '/' +
                                surf_scale + str(surf_cube.units))

            elif ols_out == 'r':
                surf_leg_unit = ' '
                atm_leg_unit = ' '
                product_unit = ' '
            lat_lims = plotting_args['lat_lims']
            lon_lims = plotting_args['lon_lims']
            levels = plotting_args['levels']

            plot_two_legged(name,
                            surf_name,
                            surf_leg,
                            surf_leg_unit,
                            flux_name,
                            atm_leg,
                            atm_leg_unit,
                            atm_name,
                            product,
                            product_unit,
                            lat,
                            lon,
                            lat_lims,
                            lon_lims,
                            levs=levels)

    return (surf_leg, surf_pvals, atm_leg, atm_pvals, product, product_pvals,
            lat, lon)
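
# A minimal sketch (hypothetical scale values) of the unit-label
# construction used above for ols_out='slope': each non-None scale factor
# becomes a ' (1e-03) '-style prefix on the units of the numerator and
# denominator of the regression slope.
surf_scale, flux_scale = 1e0, 1e-3
surf_units, flux_units = 'K', 'W m-2'
surf_lbl = ' ({:.0e}) '.format(surf_scale)
flux_lbl = ' ({:.0e}) '.format(flux_scale)
surf_leg_unit = flux_lbl + flux_units + '/' + surf_lbl + surf_units
print(surf_leg_unit)  # ' (1e-03) W m-2/ (1e+00) K'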
Example #41
0
# --- CHANGE THINGS BELOW THIS LINE TO WORK WITH YOUR FILES ETC. ---

# name of file containing an ENDGame grid, e.g. your model output
# NOTE: all the fields in the file should be on the same horizontal
#       grid, as the field used MAY NOT be the first in order of STASH
grid_file = '/home/vagrant/cylc-run/u-as297/work/1/atmos/atmosa.pa19810901_00'
# name of emissions file
emissions_file = '/home/vagrant/Tutorial/vn10.9/Task5.1/Emissions_of_ALICE.nc'

# --- BELOW THIS LINE, NOTHING SHOULD NEED TO BE CHANGED ---

species_name = 'ALICE'

# this is the grid we want to regrid to, e.g. N48 ENDGame
grd = iris.load_cube(grid_file, iris.AttributeConstraint(STASH='m01s34i010'))
grd.coord(axis='x').guess_bounds()
grd.coord(axis='y').guess_bounds()

# This is the original data
ems = iris.load_cube(emissions_file)
# make intersection between 0 and 360 longitude to ensure that
# the data is regridded correctly
nems = ems.intersection(longitude=(0, 360))

# make sure that we use the same coordinate system, otherwise regrid won't work
nems.coord(axis='x').coord_system = grd.coord_system()
nems.coord(axis='y').coord_system = grd.coord_system()

# now guess the bounds of the new grid prior to regridding
nems.coord(axis='x').guess_bounds()
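
# The snippet is truncated here; judging from the matching regrid workflow
# later in this collection (the CO emissions example), the remaining steps
# are presumably:
nems.coord(axis='y').guess_bounds()

# now regrid
ocube = nems.regrid(grd, iris.analysis.AreaWeighted())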
Example #42
0
def main():
    # extract surface temperature cubes which have an ensemble member coordinate, adding appropriate lagged ensemble metadata
    surface_temp = iris.load_cube(
        iris.sample_data_path('GloSea4', 'ensemble_???.pp'),
        iris.Constraint('surface_temperature', realization=lambda value: True),
        callback=realization_metadata,
    )

    # ----------------------------------------------------------------------------------------------------------------
    # Plot #1: Ensemble postage stamps
    # ----------------------------------------------------------------------------------------------------------------

    # for the purposes of this example, take the last time element of the cube
    last_timestep = surface_temp[:, -1, :, :]

    # Make 50 evenly spaced levels which span the dataset
    contour_levels = np.linspace(np.min(last_timestep.data),
                                 np.max(last_timestep.data), 50)

    # Create a wider than normal figure to support our many plots
    plt.figure(figsize=(12, 6), dpi=100)

    # Also manually adjust the spacings which are used when creating subplots
    plt.gcf().subplots_adjust(hspace=0.05,
                              wspace=0.05,
                              top=0.95,
                              bottom=0.05,
                              left=0.075,
                              right=0.925)

    # iterate over all possible latitude longitude slices
    for cube in last_timestep.slices(['latitude', 'longitude']):

        # get the ensemble member number from the ensemble coordinate
        ens_member = cube.coord('realization').points[0]

        # plot the data in a 4x4 grid, with each plot's position in the grid being determined by ensemble member number
        # the special case for the 13th ensemble member is to have the plot at the bottom right
        if ens_member == 13:
            plt.subplot(4, 4, 16)
        else:
            plt.subplot(4, 4, ens_member + 1)

        cf = iplt.contourf(cube, contour_levels)

        # add coastlines
        plt.gca().coastlines()

    # make an axes to put the shared colorbar in
    colorbar_axes = plt.gcf().add_axes([0.35, 0.1, 0.3, 0.05])
    colorbar = plt.colorbar(cf, colorbar_axes, orientation='horizontal')
    colorbar.set_label('%s' % last_timestep.units)

    # limit the colorbar to 8 tick marks
    import matplotlib.ticker
    colorbar.locator = matplotlib.ticker.MaxNLocator(8)
    colorbar.update_ticks()

    # get the time for the entire plot
    time_coord = last_timestep.coord('time')
    time = time_coord.units.num2date(time_coord.bounds[0, 0])

    # set a global title for the postage stamps with the date formatted by "monthname year"
    plt.suptitle('Surface temperature ensemble forecasts for %s' %
                 time.strftime('%B %Y'))

    iplt.show()

    # ----------------------------------------------------------------------------------------------------------------
    # Plot #2: ENSO plumes
    # ----------------------------------------------------------------------------------------------------------------

    # Nino 3.4 lies between: 170W and 120W, 5N and 5S, so define a constraint which matches this
    nino_3_4_constraint = iris.Constraint(
        longitude=lambda v: -170 + 360 <= v <= -120 + 360,
        latitude=lambda v: -5 <= v <= 5)

    nino_cube = surface_temp.extract(nino_3_4_constraint)

    # Subsetting a circular longitude coordinate always results in a circular coordinate, so set the coordinate to be non-circular
    nino_cube.coord('longitude').circular = False

    # Calculate the horizontal mean for the nino region
    mean = nino_cube.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)

    # Calculate the ensemble mean of the horizontal mean. To do this, remove
    # the "forecast_period" and "forecast_reference_time" coordinates, which
    # span both "realization" and "time".
    mean.remove_coord("forecast_reference_time")
    mean.remove_coord("forecast_period")
    ensemble_mean = mean.collapsed('realization', iris.analysis.MEAN)

    # take the ensemble mean from each ensemble member
    mean -= ensemble_mean.data

    plt.figure()

    for ensemble_member in mean.slices(['time']):
        # draw each ensemble member as a dashed line in black
        iplt.plot(ensemble_member, '--k')

    plt.title('Mean temperature anomaly for ENSO 3.4 region')
    plt.xlabel('Time')
    plt.ylabel('Temperature anomaly / K')

    plt.show()
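
# A minimal sketch (synthetic data) of the anomaly step in main() above:
# once 'realization' has been collapsed, the ensemble-mean data broadcasts
# across the remaining (realization, time) cube when subtracted in place.
import numpy as np
import iris.analysis
import iris.coords
import iris.cube

real = iris.coords.DimCoord([0, 1, 2], standard_name='realization')
tcoord = iris.coords.DimCoord([0, 1], standard_name='time',
                              units='hours since 1970-01-01')
cube = iris.cube.Cube(np.arange(6.0).reshape(3, 2), long_name='ts',
                      dim_coords_and_dims=[(real, 0), (tcoord, 1)])
ens_mean = cube.collapsed('realization', iris.analysis.MEAN)
cube -= ens_mean.data  # each member becomes an anomaly about the ensemble mean
print(cube.data)       # every column (fixed time) now sums to zero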
Example #43
0
    res.data[res.data > 280.2] = 0.50
    res.data[res.data > 277.2] = 0.45
    res.data[res.data > 274.4] = 0.40
    res.data[res.data > 272.3] = 0.35
    res.data[res.data > 268.3] = 0.30
    res.data[res.data > 261.4] = 0.25
    res.data[res.data > 254.6] = 0.20
    res.data[res.data > 249.1] = 0.15
    res.data[res.data > 244.9] = 0.10
    res.data[res.data > 240.5] = 0.05
    res.data[res.data > 0.95] = 0.0
    return res
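
# A minimal numpy sketch of the remapping above: each mask is re-evaluated
# on already-modified data, so the thresholds must run from the warmest
# down, and the final '> 0.95' test zeroes any raw (never-remapped)
# temperatures left over.
import numpy as np

t = np.array([281.0, 275.0, 230.0])
t[t > 280.2] = 0.50   # 281.0 -> 0.50
t[t > 274.4] = 0.40   # 275.0 -> 0.40 (the 0.50 is untouched)
t[t > 0.95] = 0.0     # 230.0 was never remapped, so it is zeroed
print(t)              # [0.5 0.4 0. ]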


# Fixed fields and noise used by the multivariate plot below
lsmask = iris.load_cube("%s/fixed_fields/land_mask/opfc_global_2019.nc" %
                        os.getenv('DATADIR'))
# Random field for the wind noise
z = lsmask.regrid(plot_cube(0.5), iris.analysis.Linear())
(width, height) = z.data.shape
z.data = numpy.random.rand(width, height)


def three_plot(ax, t2m, u10m, v10m, precip):
    ax.set_xlim(-180, 180)
    ax.set_ylim(-90, 90)
    ax.set_aspect('auto')
    ax.set_axis_off()  # Don't want surrounding x and y axis
    ax.add_patch(
        Rectangle((0, 0),
                  1,
                  1,
                  facecolor=(0.6, 0.6, 0.6, 1),
                  fill=True,
                  zorder=1))
Example #44
0
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles,
                     gio.check_iris_var(inargs.var),
                     callback=save_history)

    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    cube.attributes = atts
    orig_long_name = cube.long_name
    if cube.standard_name is None:
        orig_standard_name = orig_long_name.replace(' ', '_')
    else:
        orig_standard_name = cube.standard_name
    orig_var_name = cube.var_name

    # Temporal smoothing
    cube = timeseries.convert_to_annual(cube, full_months=True)

    # Mask marginal seas
    if inargs.basin:
        if '.nc' in inargs.basin:
            basin_cube = iris.load_cube(inargs.basin)
            cube = uconv.mask_marginal_seas(cube, basin_cube)
        else:
            basin_cube = 'create'
    else:
        basin_cube = None

    # Regrid (if needed)
    if inargs.regrid:
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

    # Change units (remove m-2)
    if inargs.area:
        cube = multiply_by_area(cube, inargs.area)
        cube.attributes = atts
        cube.long_name = orig_long_name
        cube.standard_name = orig_standard_name
        cube.var_name = orig_var_name

    # History
    history_attribute = get_history_attribute(inargs.infiles[0], history[0])
    cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Calculate output for each basin
    if isinstance(basin_cube, iris.cube.Cube):
        ndim = cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1], cube.shape)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    elif isinstance(basin_cube, str):
        basin_array = uconv.create_basin_array(cube)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    else:
        basin_array = None
        basin_list = ['globe']

    dim_coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert len(dim_coord_names) == 3
    assert dim_coord_names[0] == 'time'
    x_axis_name = dim_coord_names[2]

    for aux_coord in aux_coord_names:
        cube.remove_coord(aux_coord)

    out_cubes = []
    for basin_name in basin_list:
        data_cube = cube.copy()
        if basin_name != 'globe':
            data_cube.data.mask = numpy.where(
                (~data_cube.data.mask) &
                (basin_array == basins[basin_name]), False, True)

        # Zonal statistic
        zonal_cube = data_cube.collapsed(
            x_axis_name, aggregation_functions[inargs.zonal_stat])
        zonal_cube.remove_coord(x_axis_name)

        # Attributes
        standard_name = 'zonal_%s_%s_%s' % (inargs.zonal_stat,
                                            orig_standard_name, basin_name)
        var_name = '%s_%s_%s' % (orig_var_name,
                                 aggregation_abbreviations[inargs.zonal_stat],
                                 basin_name)
        iris.std_names.STD_NAMES[standard_name] = {
            'canonical_units': zonal_cube.units
        }

        zonal_cube.standard_name = standard_name
        zonal_cube.long_name = standard_name.replace('_', ' ')
        zonal_cube.var_name = var_name

        out_cubes.append(zonal_cube)

    out_cubes = iris.cube.CubeList(out_cubes)
    iris.save(out_cubes, inargs.outfile)
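
# A minimal sketch of the STD_NAMES registration used above: iris rejects
# unknown standard names, so the derived name (hypothetical here) must be
# registered before it can be assigned to the cube.
import numpy as np
import iris.cube
import iris.std_names

cube = iris.cube.Cube(np.array([1.0]), units='kg m-2 s-1')
name = 'zonal_mean_water_flux_into_sea_water_globe'  # hypothetical
iris.std_names.STD_NAMES[name] = {'canonical_units': str(cube.units)}
cube.standard_name = name  # accepted now, instead of raising ValueError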
Example #45
0
def makeCmipCube(infiles, varname, fullvarname, minyear, maxyear, template):
    '''
    Makes a cube only from the relevant CMIP5 input files.
    This avoids problems that can occur with files outside the period of interest.
    '''
    if '@' in varname:
        plev = int(varname.split('@')[1])
        var = varname.split('@')[0]
    else:
        plev = 'none'

    model_cubes = iris.cube.CubeList([])
    # Loop through the files, and only use those that overlap the period of interest
    for file in infiles:
        # Use the file path to determine the start and end date of the data
        file_dates = os.path.splitext(os.path.basename(file))[0].split('_')[5]
        strt_dt_txt = file_dates.split('-')[0]
        end_dt_txt = file_dates.split('-')[1]
        strt_dt = datetime.datetime(int(strt_dt_txt[0:4]), int(strt_dt_txt[4:6]), 1, 0, 0)
        end_dt = datetime.datetime(int(end_dt_txt[0:4]), int(end_dt_txt[4:6]), 1, 0, 0)
        # If the file's date range overlaps the period of interest
        #print('minyear: ' + str(minyear) + ', maxyear: ' + str(maxyear))
        if strt_dt.year < maxyear and end_dt.year > minyear:
            #print(file)
            cube = iris.load_cube(file, fullvarname, callback=io.callback_champ_stdcal) # varname,
            # Extract correct vertical level
            if plev != 'none':
                try:
                    cube = cube.extract(iris.Constraint(air_pressure=plev*100.))
                except Exception:
                    try:
                        cube = cube.extract(iris.Constraint(atmosphere_hybrid_sigma_pressure_coordinate=plev*100.))
                    except Exception:
                        print('Probably need to change air_pressure for something else')
                        pdb.set_trace()

            model_cubes.append(cube)


    iris.util.unify_time_units(model_cubes)
    iris.experimental.equalise_cubes.equalise_attributes(model_cubes)

    # Equalise metadata so that the cubes can be concatenated ...
    for i in range(1, len(model_cubes)):
        model_cubes[i].metadata = model_cubes[0].metadata

    try:
        model_cube = model_cubes.concatenate_cube()
    except Exception:
        print('Concatenation failed ...')
        pdb.set_trace()

    # Extract relevant years from the data
    yrcon = iris.Constraint(season_year = lambda cell, minyr=minyear, maxyr=maxyear: minyr <= cell <= maxyr)
    model_ss = model_cube.extract(yrcon)
    # Regrid to template grid
    model_ss = guesslatlonbounds(model_ss)
    model_ss = model_ss.regrid(template, ia.AreaWeighted())
    #print(model_ss)

    if varname == 'precipitation_flux':
        model_ss = proc.precip_to_mmpday(model_ss)

    return model_ss
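
# A minimal sketch of the filename-based date filter above, using a
# hypothetical CMIP5-style path (field 5 of the underscore-split basename
# holds the 'YYYYMM-YYYYMM' date range).
import datetime
import os

fname = '/tmp/pr_Amon_HadGEM2-ES_rcp85_r1i1p1_200512-210011.nc'  # hypothetical
file_dates = os.path.splitext(os.path.basename(fname))[0].split('_')[5]
strt_txt, end_txt = file_dates.split('-')
strt_dt = datetime.datetime(int(strt_txt[0:4]), int(strt_txt[4:6]), 1)
end_dt = datetime.datetime(int(end_txt[0:4]), int(end_txt[4:6]), 1)
print(strt_dt.year, end_dt.year)  # 2005 2100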
Example #46
0
 def test_uint32_data_netcdf3(self):
     self.cube.data = self.cube.data.astype(np.uint32)
     with self.temp_filename(suffix='.nc') as filename:
         iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
         reloaded = iris.load_cube(filename)
         self.assertCML(reloaded, ('netcdf', 'uint32_data_netcdf3.cml'))
Example #47
0
 def test_scalar_cube_save_load(self):
     cube = iris.cube.Cube(1, long_name="scalar_cube")
     with self.temp_filename(suffix=".nc") as fout:
         iris.save(cube, fout)
         scalar_cube = iris.load_cube(fout)
         self.assertEqual(scalar_cube.name(), "scalar_cube")
Example #48
0
# <codecell>

# SECOORA region (NC, SC, GA, FL).
bbox = [-87.40, 24.25, -74.70, 36.70]

url = "http://geoport-dev.whoi.edu/thredds/dodsC/estofs/atlantic"

# <markdowncell>

# # Works fine

# <codecell>

t0 = time.time()
cube = iris.load_cube(url, 'sea_surface_height_above_geoid')

lon = iris.Constraint(longitude=lambda l: bbox[0] <= l <= bbox[2])
lat = iris.Constraint(latitude=lambda l: bbox[1] <= l <= bbox[3])

cube = cube.extract(lon & lat)
print('elapsed time = %f seconds' % (time.time() - t0))
print(cube)

# <markdowncell>

# # Hangs and I have to hit the kernel interrupt key

# <codecell>

t0 = time.time()
Example #49
0
 def test_print(self):
     cube = iris.load_cube(self.fname)
     printed = cube.__str__()
     self.assertTrue(("\n     Cell measures:\n          cell_area"
                      "                           -         -    "
                      "    x         x") in printed)
Example #50
0
 def test_load_laea_grid(self):
     cube = iris.load_cube(
         tests.get_data_path(("NetCDF", "lambert_azimuthal_equal_area",
                              "euro_air_temp.nc")))
     self.assertCML(cube, ("netcdf", "netcdf_laea.cml"))
Example #51
0
 def setUp(self):
     file = tests.get_data_path(('PP', 'globClim1', 'theta.pp'))
     self.cube = iris.load_cube(file)
Example #52
0
 def test_load(self):
     cube = iris.load_cube(self.fname)
     self.assertEqual(len(cube.cell_measures()), 1)
     self.assertEqual(cube.cell_measures()[0].measure, "area")
Example #53
0
def main():
    # Min and max lats lons from smallest model domain (dkbhu) - see spreadsheet

    latmin = -6.79
    latmax = 29.721
    lonmin = 340.
    lonmax = 379.98

    lat_constraint = iris.Constraint(
        grid_latitude=lambda la: latmin <= la.point <= latmax)
    lon_constraint = iris.Constraint(
        grid_longitude=lambda lo: lonmin <= lo.point <= lonmax)

    # Global LAM not rotated - so different coord constraints
    lonmin_g = 64.1153327
    lonmax_g = 101.865817

    lon_constraint_g = iris.Constraint(
        grid_longitude=lambda lo: lonmin_g <= lo.point <= lonmax_g)
    # Load global cube

    gl = '/nfs/a90/eepdw/Data/EMBRACE/Mean_State/pp_files/djzn/djznw/%s.pp' % pp_file
    glob = iris.load_cube(gl, lat_constraint & lon_constraint_g)
    #glob = iris.load_cube(gl)
    cs_glob = glob.coord_system('CoordSystem')
    # Unrotate global cube

    lat_g = glob.coord('grid_latitude').points
    lon_g = glob.coord('grid_longitude').points

    #print lat_g
    if isinstance(cs_glob, iris.coord_systems.RotatedGeogCS):
        print(' Global Model - djznw - Unrotate pole %s' % cs_glob)
        lons_g, lats_g = np.meshgrid(lon_g, lat_g)
        lons_g, lats_g = iris.analysis.cartography.unrotate_pole(
            lons_g, lats_g, cs_glob.grid_north_pole_longitude,
            cs_glob.grid_north_pole_latitude)

        lon_g = lons_g[0]
        lat_g = lats_g[:, 0]

        #print lats_g

    for i, coord in enumerate(glob.coords()):
        if coord.standard_name == 'grid_latitude':
            lat_dim_coord_glob = i
        if coord.standard_name == 'grid_longitude':
            lon_dim_coord_glob = i

    csur_glob = cs_glob.ellipsoid
    glob.remove_coord('grid_latitude')
    glob.remove_coord('grid_longitude')
    glob.add_dim_coord(
        iris.coords.DimCoord(points=lat_g,
                             standard_name='grid_latitude',
                             units='degrees',
                             coord_system=csur_glob), lat_dim_coord_glob)
    glob.add_dim_coord(
        iris.coords.DimCoord(points=lon_g,
                             standard_name='grid_longitude',
                             units='degrees',
                             coord_system=csur_glob), lon_dim_coord_glob)

    experiment_ids = [
        'djzny', 'djznq', 'djzns', 'djznw', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu',
        'dklzq'
    ]

    #experiment_ids = ['djzny' ]
    for experiment_id in experiment_ids:

        expmin1 = experiment_id[:-1]
        pfile = '/nfs/a90/eepdw/Data/EMBRACE/Mean_State/pp_files/%s/%s/%s.pp' % (
            expmin1, experiment_id, pp_file)

        #pc =  iris(pfile)
        #pcube = iris.load_cube(pfile, lat_constraint & lon_constraint)
        pcube = iris.load_cube(pfile)
        #print pcube
        #print pc

        # Get min and max latitude/longitude and unrotate them to get the
        # min/max corners for cropping the plot automatically; otherwise it
        # ends up with blank bits at the edges

        # Unrotate cube

        lat = pcube.coord('grid_latitude').points
        lon = pcube.coord('grid_longitude').points

        #print lat
        #print 'lat'
        #print lon
        cs = pcube.coord_system('CoordSystem')

        if isinstance(cs, iris.coord_systems.RotatedGeogCS):
            print(' %s - Unrotate pole %s' % (experiment_id, cs))

            lons, lats = np.meshgrid(lon, lat)

            lons, lats = iris.analysis.cartography.unrotate_pole(
                lons, lats, cs.grid_north_pole_longitude,
                cs.grid_north_pole_latitude)

            lon = lons[0]
            lat = lats[:, 0]

            for i, coord in enumerate(pcube.coords()):
                if coord.standard_name == 'grid_latitude':
                    lat_dim_coord = i
                if coord.standard_name == 'grid_longitude':
                    lon_dim_coord = i

            csur = cs.ellipsoid

            pcube.remove_coord('grid_latitude')
            pcube.remove_coord('grid_longitude')
            pcube.add_dim_coord(
                iris.coords.DimCoord(points=lat,
                                     standard_name='grid_latitude',
                                     units='degrees',
                                     coord_system=csur), lat_dim_coord)
            pcube.add_dim_coord(
                iris.coords.DimCoord(points=lon,
                                     standard_name='grid_longitude',
                                     units='degrees',
                                     coord_system=csur), lon_dim_coord)

        lon_min = np.min(lons_g)
        lon_max = np.max(lons_g)

        lon_low_tick = lon_min - (lon_min % divisor)
        lon_high_tick = math.ceil(lon_max / divisor) * divisor

        lat_min = np.min(lats_g)
        lat_max = np.max(lats_g)
        lat_low_tick = lat_min - (lat_min % divisor)
        lat_high_tick = math.ceil(lat_max / divisor) * divisor

        print(lon_high_tick)
        print(lon_low_tick)

        pcube_regrid_data = scipy.interpolate.griddata(
            (lats.flatten(), lons.flatten()),
            pcube.data.flatten(), (lats_g, lons_g),
            method='linear')

        #pcube_regrid = iris.analysis.interpolate.linear(pcube, sample_points)
        #print pcube.data.flatten()
        pcube_regrid = glob.copy(data=pcube_regrid_data)
        pcubediff = pcube_regrid - glob
        #print pcube.data[0,0]
        #print pcube_regrid_data[0,0]
        #print pcubediff.data
        #print glob.data[0,0]
        plt.figure(figsize=(8, 8))

        cmap = plt.cm.RdBu_r

        ax = plt.axes(projection=ccrs.PlateCarree(),
                      extent=(lonmin_g + 2, lonmax_g - 2,
                              latmin + degs_crop_bottom,
                              latmax - degs_crop_top))

        clevs = np.linspace(min_contour, max_contour, 256)
        cont = iplt.contourf(pcubediff, clevs, cmap=cmap, extend='both')

        #plt.clabel(cont, fmt='%d')
        #ax.stock_img()
        ax.coastlines(resolution='110m', color='#262626')

        gl = ax.gridlines(draw_labels=True,
                          linewidth=0.5,
                          color='#262626',
                          alpha=0.5,
                          linestyle='--')
        gl.xlabels_top = False
        gl.ylabels_right = False
        #gl.xlines = False
        dx, dy = 10, 10

        gl.xlocator = mticker.FixedLocator(
            range(int(lon_low_tick),
                  int(lon_high_tick) + dx, dx))
        gl.ylocator = mticker.FixedLocator(
            range(int(lat_low_tick),
                  int(lat_high_tick) + dy, dy))
        gl.xformatter = LONGITUDE_FORMATTER
        gl.yformatter = LATITUDE_FORMATTER

        gl.xlabel_style = {'size': 12, 'color': 'black'}
        #gl.xlabel_style = {'color': '#262626', 'weight': 'bold'}
        gl.ylabel_style = {'size': 12, 'color': 'black'}

        cbar = plt.colorbar(cont,
                            orientation='horizontal',
                            pad=0.05,
                            extend='both',
                            format='%d')
        #cbar.set_label('')
        cbar.set_label(pcube.units, fontsize=10)
        cbar.set_ticks(
            np.arange(min_contour, max_contour + tick_interval, tick_interval))
        ticks = (np.arange(min_contour, max_contour + tick_interval,
                           tick_interval))
        cbar.set_ticklabels(['%d' % i for i in ticks])
        main_title = '%s - Difference' % pcube.standard_name.title().replace(
            '_', ' ')
        model_info = re.sub('(.{68} )', '\\1\n',
                            str(model_name_convert_title.main(experiment_id)),
                            0, re.DOTALL)
        model_info = re.sub(r'[(\']', ' ', model_info)
        model_info = re.sub(r'[\',)]', ' ', model_info)
        print(model_info)

        if not os.path.exists('%s%s/%s' % (save_path, experiment_id, pp_file)):
            os.makedirs('%s%s/%s' % (save_path, experiment_id, pp_file))

        plt.savefig(
            '%s%s/%s/%s_%s_notitle_diff.png' %
            (save_path, experiment_id, pp_file, experiment_id, pp_file),
            format='png',
            bbox_inches='tight')

        plt.title('\n'.join(
            wrap('%s\n%s' % (main_title, model_info),
                 1000,
                 replace_whitespace=False)),
                  fontsize=16)

        #plt.show()

        plt.savefig(
            '%s%s/%s/%s_%s_diff.png' %
            (save_path, experiment_id, pp_file, experiment_id, pp_file),
            format='png',
            bbox_inches='tight')

        plt.close()
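
# A minimal sketch of the unrotation used in main() above: unrotate_pole
# maps rotated-grid lon/lat meshes back to true lon/lat (the pole position
# values here are hypothetical).
import numpy as np
import iris.analysis.cartography

rot_lons, rot_lats = np.meshgrid(np.linspace(355, 365, 3),
                                 np.linspace(-5, 5, 3))
true_lons, true_lats = iris.analysis.cartography.unrotate_pole(
    rot_lons, rot_lats,
    193.0,  # grid_north_pole_longitude (hypothetical)
    75.0)   # grid_north_pole_latitude (hypothetical)
print(true_lons.shape, true_lats.shape)  # (3, 3) (3, 3)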
Example #54
0
#
#  321-340: full atmosphere
#
stash = 'm01s00i303'

# --- BELOW THIS LINE, NOTHING SHOULD NEED TO BE CHANGED ---

species_name = 'CO'

# this is the grid we want to regrid to, e.g. N96 ENDGame
grd = iris.load(grid_file)[0]
grd.coord(axis='x').guess_bounds()
grd.coord(axis='y').guess_bounds()

# This is the original data
ems = iris.load_cube(emissions_file)

# make intersection between 0 and 360 longitude to ensure that
# the data is regridded correctly
nems = ems.intersection(longitude=(0, 360))

# make sure that we use the same coordinate system, otherwise regrid won't work
nems.coord(axis='x').coord_system = grd.coord_system()
nems.coord(axis='y').coord_system = grd.coord_system()

# now guess the bounds of the new grid prior to regridding
nems.coord(axis='x').guess_bounds()
nems.coord(axis='y').guess_bounds()

# now regrid
ocube = nems.regrid(grd, iris.analysis.AreaWeighted())
Example #55
0
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip)  # noqa

import matplotlib.pyplot as plt

import iris
import iris.plot as iplt

fname = iris.sample_data_path('air_temp.pp')

# Load exactly one cube from the given file
temperature = iris.load_cube(fname)

# We are only interested in a small number of latitudes (the 4 after and
# including the 5th element), so index them out
temperature = temperature[5:9, :]

for cube in temperature.slices('longitude'):

    # Create a string label to identify this cube (i.e. latitude: value)
    cube_label = 'latitude: %s' % cube.coord('latitude').points[0]

    # Plot the cube, and associate it with a label
    iplt.plot(cube, label=cube_label)

# Match the longitude range to global
max_lon = temperature.coord('longitude').points.max()
min_lon = temperature.coord('longitude').points.min()
plt.xlim(min_lon, max_lon)

# Add the legend with 2 columns
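plt.legend(ncol=2)

# presumably the truncated snippet ends by showing the plot
iplt.show()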
Example #56
0
 def setUp(self):
     filename = tests.get_data_path(
         ('NetCDF', 'rotated', 'xy', 'rotPole_landAreaFraction.nc'))
     self.cube = iris.load_cube(filename)
Example #57
0
# Lat:Lon aspect does not match the plot aspect, ignore this and
#  fill the figure with the plot.
matplotlib.rc('image', aspect='auto')

# Draw a lat:lon grid
mg.background.add_grid(ax, sep_major=5, sep_minor=2.5, color=(0, 0.3, 0, 0.2))

# Add the land
land_img = ax.background_img(name='GreyT', resolution='low')

# Get the wind data from the Meteorographica example_data
#   Mystical incantation to get filenames
udf = pkg_resources.resource_filename(
    pkg_resources.Requirement.parse('Meteorographica'),
    'example_data/20CR2c.1987101606.uwnd.10m.nc')
uwnd = iris.load_cube(udf)
vdf = pkg_resources.resource_filename(
    pkg_resources.Requirement.parse('Meteorographica'),
    'example_data/20CR2c.1987101606.vwnd.10m.nc')
vwnd = iris.load_cube(vdf)
#   Reduce to a single ensemble member
uwnd = uwnd.extract(iris.Constraint(member=1))
vwnd = vwnd.extract(iris.Constraint(member=1))

# Plot the wind vectors
mg.wind.plot(ax, uwnd, vwnd)

# Also pressure
edf = pkg_resources.resource_filename(
    pkg_resources.Requirement.parse('Meteorographica'),
    'example_data/20CR2c.1987101606.prmsl.nc')
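# Truncated here; mirroring the uwnd/vwnd pattern above, the pressure field
# would presumably be loaded and reduced the same way:
prmsl = iris.load_cube(edf)
prmsl = prmsl.extract(iris.Constraint(member=1))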
Example #58
0
def main(inargs):
    """Run the program."""

    var = inargs.pe_files[0].split('/')[-1].split('_')[0]
    assert var in ['pe', 'wfo']
    var_name = 'precipitation minus evaporation flux' if var == 'pe' else 'water_flux_into_sea_water'

    area_cube = gio.get_ocean_weights(
        inargs.area_file) if inargs.area_file else None
    pe_cube, pe_lats, pe_history = read_data(inargs.pe_files,
                                             var_name,
                                             area_cube,
                                             annual=inargs.annual,
                                             multiply_by_area=inargs.area,
                                             chunk_annual=inargs.chunk)
    basin_cube = iris.load_cube(inargs.basin_file, 'region')

    metadata = {
        inargs.pe_files[0]: pe_history[0],
        inargs.basin_file: basin_cube.attributes['history']
    }
    if inargs.data_var == 'cell_area':
        data_cube = iris.load_cube(inargs.data_files[0], 'cell_area')
        assert data_cube.shape == pe_cube.shape[1:]
    elif inargs.data_files:
        data_cube, data_lats, data_history = read_data(
            inargs.data_files,
            inargs.data_var,
            area_cube,
            annual=inargs.annual,
            multiply_by_area=inargs.area,
            chunk_annual=inargs.chunk)
        assert data_cube.shape == pe_cube.shape
        metadata[inargs.data_files[0]] = data_history[0]
    else:
        data_cube = pe_cube.copy()
        data_var = var_name

    if area_cube:
        area_data = area_cube.data
    else:
        if data_cube.ndim == 3:
            area_data = spatial_weights.area_array(data_cube[0, ::])
        else:
            assert data_cube.ndim == 2
            area_data = spatial_weights.area_array(data_cube)

    region_data = np.zeros([pe_cube.shape[0], 6, 8])
    ntimes = pe_cube.shape[0]
    for tstep in range(ntimes):
        if inargs.data_var == 'cell_area':
            var_data = data_cube.data
        else:
            var_data = data_cube[tstep, ::].data
        region_data[tstep, :] = get_regional_aggregates(
            inargs.agg, var_data, pe_cube[tstep, ::].data, pe_lats,
            basin_cube.data, area_data)

    if inargs.cumsum:
        region_data = np.cumsum(region_data, axis=0)

    pe_region_coord = create_pe_region_coord()
    basin_coord = create_basin_coord()
    time_coord = pe_cube.coord('time')

    if inargs.data_var:
        standard_name = data_cube.standard_name
    elif var == 'pe':
        iris.std_names.STD_NAMES['precipitation_minus_evaporation_flux'] = {
            'canonical_units': pe_cube.units
        }
        standard_name = 'precipitation_minus_evaporation_flux'
    else:
        standard_name = pe_cube.standard_name
    atts = pe_cube.attributes if inargs.data_var == 'cell_area' else data_cube.attributes
    dim_coords_list = [(time_coord, 0), (pe_region_coord, 1), (basin_coord, 2)]
    out_cube = iris.cube.Cube(region_data,
                              standard_name=standard_name,
                              long_name=data_cube.long_name,
                              var_name=data_cube.var_name,
                              units=data_cube.units,
                              attributes=atts,
                              dim_coords_and_dims=dim_coords_list)

    out_cube.attributes['history'] = cmdprov.new_log(infile_history=metadata,
                                                     git_repo=repo_dir)
    iris.save(out_cube, inargs.outfile)
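
# A minimal sketch of the cube construction used above: an aggregate array
# becomes a cube by pairing each DimCoord with its dimension index.
import numpy as np
import iris.coords
import iris.cube

data = np.zeros((2, 3))
t = iris.coords.DimCoord([0, 1], standard_name='time',
                         units='days since 1970-01-01')
basin = iris.coords.DimCoord([0, 1, 2], long_name='basin')
out = iris.cube.Cube(data, long_name='regional aggregate',
                     dim_coords_and_dims=[(t, 0), (basin, 1)])
print(out)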
Example #59
0
# <codecell>

ncv.keys()

# <codecell>

lon = ncv['longitude'][:]
lat = ncv['latitude'][:]

# <codecell>

import iris

# <codecell>

t = iris.load_cube(url,'sea_water_temperature')

# <codecell>

print(t)

# <codecell>

lon = t.coord(axis='X')

# <codecell>

lat = t.coord(axis='Y')

# <codecell>
Example #60
0
def three_plot(ax, t2m, u10m, v10m, precip):
    ax.set_xlim(-180, 180)
    ax.set_ylim(-90, 90)
    ax.set_aspect('auto')
    ax.set_axis_off()  # Don't want surrounding x and y axis
    ax.add_patch(
        Rectangle((0, 0),
                  1,
                  1,
                  facecolor=(0.6, 0.6, 0.6, 1),
                  fill=True,
                  zorder=1))
    # Draw lines of latitude and longitude
    for lat in range(-90, 95, 5):
        lwd = 0.75
        x = []
        y = []
        for lon in range(-180, 181, 1):
            rp = iris.analysis.cartography.rotate_pole(numpy.array(lon),
                                                       numpy.array(lat), 180,
                                                       90)
            nx = rp[0] + 0
            if nx > 180:
                nx -= 360
            ny = rp[1]
            if (len(x) == 0
                    or (abs(nx - x[-1]) < 10 and abs(ny - y[-1]) < 10)):
                x.append(nx)
                y.append(ny)
            else:
                ax.add_line(
                    Line2D(x,
                           y,
                           linewidth=lwd,
                           color=(0.4, 0.4, 0.4, 1),
                           zorder=10))
                x = []
                y = []
        if (len(x) > 1):
            ax.add_line(
                Line2D(x,
                       y,
                       linewidth=lwd,
                       color=(0.4, 0.4, 0.4, 1),
                       zorder=10))

    for lon in range(-180, 185, 5):
        lwd = 0.75
        x = []
        y = []
        for lat in range(-90, 90, 1):
            rp = iris.analysis.cartography.rotate_pole(numpy.array(lon),
                                                       numpy.array(lat), 180,
                                                       90)
            nx = rp[0] + 0
            if nx > 180:
                nx -= 360
            ny = rp[1]
            if (len(x) == 0
                    or (abs(nx - x[-1]) < 10 and abs(ny - y[-1]) < 10)):
                x.append(nx)
                y.append(ny)
            else:
                ax.add_line(
                    Line2D(x,
                           y,
                           linewidth=lwd,
                           color=(0.4, 0.4, 0.4, 1),
                           zorder=10))
                x = []
                y = []
        if (len(x) > 1):
            ax.add_line(
                Line2D(x,
                       y,
                       linewidth=lwd,
                       color=(0.4, 0.4, 0.4, 1),
                       zorder=10))
    # Add the continents
    mask_pc = plot_cube(0.05)
    lsmask = iris.load_cube("%s/fixed_fields/land_mask/opfc_global_2019.nc" %
                            os.getenv('DATADIR'))
    lsmask = lsmask.regrid(mask_pc, iris.analysis.Linear())
    lats = lsmask.coord('latitude').points
    lons = lsmask.coord('longitude').points
    mask_img = ax.pcolorfast(lons,
                             lats,
                             lsmask.data,
                             cmap=matplotlib.colors.ListedColormap(
                                 ((0.4, 0.4, 0.4, 0), (0.4, 0.4, 0.4, 1))),
                             vmin=0,
                             vmax=1,
                             alpha=1.0,
                             zorder=20)

    # Calculate the wind noise
    wind_pc = plot_cube(0.5)
    rw = iris.analysis.cartography.rotate_winds(u10m, v10m, cs)
    u10m = rw[0].regrid(wind_pc, iris.analysis.Linear())
    v10m = rw[1].regrid(wind_pc, iris.analysis.Linear())
    wind_noise_field = wind_field(u10m, v10m, z, sequence=None, epsilon=0.01)

    # Plot the temperature
    t2m_pc = plot_cube(0.05)
    t2m = t2m.regrid(t2m_pc, iris.analysis.Linear())
    t2m = quantile_t2m(t2m)
    # Adjust to show the wind
    wscale = 200
    s = wind_noise_field.data.shape
    wind_noise_field.data = qcut(
        wind_noise_field.data.flatten(),
        wscale,
        labels=False,
        duplicates='drop').reshape(s) - (wscale - 1) / 2

    # Plot as a colour map
    wnf = wind_noise_field.regrid(t2m, iris.analysis.Linear())
    t2m_img = ax.pcolorfast(lons,
                            lats,
                            t2m.data * 200 + wnf.data,
                            cmap='RdYlBu_r',
                            alpha=0.8,
                            zorder=100)

    # Plot the precip
    precip_pc = plot_cube(0.25)
    precip = precip.regrid(precip_pc, iris.analysis.Linear())
    precip = normalise_precip(precip)
    wnf = wind_noise_field.regrid(precip, iris.analysis.Linear())
    precip.data += wnf.data / 1000
    cols = []
    for ci in range(10):
        cols.append([0.06885643, 0.14208946, 0.07903363, 0.0])
    for ci in range(50):
        cols.append([0.06885643, 0.14208946, 0.07903363, ci / 50])
    cm_data = [[0.06885643, 0.14208946, 0.07903363],
               [0.07022733, 0.145481, 0.08182252],
               [0.07158041, 0.14886551, 0.08459735],
               [0.07289843, 0.15224692, 0.08735815],
               [0.07419261, 0.15562329, 0.09010536],
               [0.07545752, 0.15899606, 0.0928391],
               [0.07669294, 0.16236561, 0.09555954],
               [0.0779044, 0.16573113, 0.09826708],
               [0.07908113, 0.16909506, 0.10096151],
               [0.08023865, 0.17245461, 0.10364358],
               [0.0813567, 0.17581401, 0.1063126],
               [0.08245885, 0.17916893, 0.10896978],
               [0.08351901, 0.18252464, 0.11161396],
               [0.08456274, 0.18587646, 0.1142466],
               [0.08556724, 0.18922894, 0.11686652],
               [0.08655243, 0.19257854, 0.119475],
               [0.08750046, 0.19592877, 0.12207102],
               [0.08842694, 0.19927694, 0.12465567],
               [0.08931759, 0.20262583, 0.12722804],
               [0.09018515, 0.2059733, 0.1297891],
               [0.09101742, 0.20932169, 0.13233799],
               [0.09182583, 0.21266913, 0.13487564],
               [0.09259864, 0.21601782, 0.13740115],
               [0.09334765, 0.21936582, 0.13991552],
               [0.09405985, 0.22271554, 0.14241766],
               [0.09474918, 0.22606468, 0.14490881],
               [0.09539954, 0.22941612, 0.14738752],
               [0.0960289, 0.23276689, 0.14985548],
               [0.09661612, 0.23612068, 0.15231064],
               [0.09718523, 0.23947356, 0.15475537],
               [0.09770794, 0.2428303, 0.1571868],
               [0.09821309, 0.24618625, 0.15960787],
               [0.09867327, 0.24954595, 0.16201567],
               [0.09911166, 0.25290573, 0.16441267],
               [0.09951033, 0.25626852, 0.16679686],
               [0.09988043, 0.25963262, 0.16916945],
               [0.1002173, 0.26299882, 0.17152986],
               [0.10051752, 0.2663677, 0.17387765],
               [0.10079233, 0.2697376, 0.17621409],
               [0.10102103, 0.27311168, 0.17853665],
               [0.10122817, 0.27648631, 0.18084823],
               [0.10138903, 0.2798652, 0.18314573],
               [0.10152195, 0.28324567, 0.18543137],
               [0.10161961, 0.28662882, 0.18770415],
               [0.10167654, 0.29001546, 0.18996328],
               [0.10170933, 0.29340326, 0.19221085],
               [0.10169001, 0.29679616, 0.19444309],
               [0.1016439, 0.30019064, 0.19666326],
               [0.10156046, 0.30358817, 0.19886988],
               [0.10143361, 0.30698958, 0.201062],
               [0.10127976, 0.31039267, 0.20324181],
               [0.1010766, 0.31380039, 0.20540608],
               [0.10083896, 0.31721078, 0.20755678],
               [0.10057112, 0.32062332, 0.20969446],
               [0.10024781, 0.32404118, 0.21181533],
               [0.09989443, 0.32746115, 0.21392297],
               [0.0995047, 0.33088402, 0.21601636],
               [0.09906254, 0.3343117, 0.218093],
               [0.0985884, 0.33774172, 0.2201558],
               [0.09807546, 0.34117488, 0.22220362],
               [0.09750943, 0.34461278, 0.22423417],
               [0.09690948, 0.3480532, 0.22625023],
               [0.09627239, 0.35149649, 0.22825119],
               [0.09557752, 0.35494489, 0.23023372],
               [0.09484678, 0.35839595, 0.23220107],
               [0.09407947, 0.36184972, 0.23415298],
               [0.09325665, 0.36530817, 0.23608636],
               [0.09239033, 0.36876996, 0.23800293],
               [0.09148554, 0.37223456, 0.23990332],
               [0.09053798, 0.37570238, 0.2417867],
               [0.08953167, 0.3791749, 0.24365037],
               [0.08848513, 0.38265026, 0.2454971],
               [0.08739776, 0.3861285, 0.24732662],
               [0.08626466, 0.38961004, 0.24913796],
               [0.08507286, 0.39309597, 0.25092886],
               [0.08383877, 0.39658475, 0.25270175],
               [0.0825619, 0.40007639, 0.25445635],
               [0.08124183, 0.4035709, 0.25619236],
               [0.07986463, 0.40706939, 0.25790734],
               [0.07843839, 0.41057109, 0.25960234],
               [0.07696833, 0.41407555, 0.26127794],
               [0.07545435, 0.41758275, 0.26293384],
               [0.07389642, 0.42109266, 0.26456976],
               [0.07228966, 0.42460561, 0.26618462],
               [0.07062863, 0.42812193, 0.26777721],
               [0.06892492, 0.43164076, 0.26934902],
               [0.06717918, 0.43516204, 0.27089974],
               [0.06539233, 0.43868571, 0.27242909],
               [0.06356557, 0.44221171, 0.27393677],
               [0.06170041, 0.44573995, 0.27542249],
               [0.05979877, 0.44927035, 0.27688596],
               [0.05785702, 0.45280318, 0.27832599],
               [0.05588066, 0.45633814, 0.27974269],
               [0.05387746, 0.45987489, 0.28113643],
               [0.05185174, 0.4634133, 0.28250695],
               [0.04980869, 0.46695324, 0.28385397],
               [0.04775453, 0.47049459, 0.28517723],
               [0.04569662, 0.47403719, 0.28647649],
               [0.04364369, 0.47758089, 0.28775151],
               [0.04160606, 0.48112551, 0.28900203],
               [0.0395867, 0.48467089, 0.29022786],
               [0.03763814, 0.48821682, 0.29142876],
               [0.03580155, 0.49176311, 0.29260454],
               [0.03408899, 0.49530953, 0.29375502],
               [0.03251323, 0.49885586, 0.29488003],
               [0.03108771, 0.50240186, 0.29597942],
               [0.02982666, 0.50594726, 0.29705307],
               [0.02874508, 0.50949179, 0.29810087],
               [0.02785177, 0.51303546, 0.29912154],
               [0.02716441, 0.51657792, 0.3001151],
               [0.02670582, 0.52011863, 0.30108235],
               [0.02649479, 0.52365722, 0.30202332],
               [0.02655111, 0.52719334, 0.30293807],
               [0.0268957, 0.53072659, 0.30382669],
               [0.02754542, 0.53425678, 0.30468824],
               [0.02851433, 0.53778386, 0.30552072],
               [0.02984163, 0.54130675, 0.30632735],
               [0.03155357, 0.54482496, 0.30710846],
               [0.03367174, 0.54833822, 0.3078629],
               [0.03621581, 0.55184641, 0.30858828],
               [0.03923368, 0.55534819, 0.3092893],
               [0.04267874, 0.55884313, 0.30996532],
               [0.04642777, 0.56233118, 0.31061247],
               [0.05049037, 0.56581074, 0.31123743],
               [0.05483677, 0.56928169, 0.31183596],
               [0.05945722, 0.57274288, 0.31241135],
               [0.06433628, 0.57619346, 0.3129641],
               [0.0694603, 0.57963256, 0.31349439],
               [0.07481987, 0.58305904, 0.31400453],
               [0.08040588, 0.58647176, 0.31449636],
               [0.08620977, 0.58986967, 0.3149695],
               [0.09222724, 0.59325117, 0.31543058],
               [0.0984523, 0.59661516, 0.31587743],
               [0.10488194, 0.59995997, 0.31631578],
               [0.11151261, 0.60328396, 0.31675059],
               [0.11834151, 0.60658549, 0.31718585],
               [0.12536778, 0.6098628, 0.31762468],
               [0.13258816, 0.61311399, 0.31807501],
               [0.13999895, 0.6163372, 0.31854464],
               [0.14759587, 0.61953055, 0.31904185],
               [0.15537304, 0.62269226, 0.31957617],
               [0.16332236, 0.62582066, 0.32015839],
               [0.17143307, 0.62891434, 0.32080039],
               [0.17969137, 0.63197223, 0.32151494],
               [0.18808025, 0.63499369, 0.32231537],
               [0.19658051, 0.63797853, 0.32321469],
               [0.20516717, 0.6409274, 0.32422681],
               [0.21381461, 0.64384147, 0.32536378],
               [0.22249597, 0.64672254, 0.32663605],
               [0.23118418, 0.64957297, 0.32805203],
               [0.23985293, 0.65239558, 0.32961779],
               [0.24847602, 0.65519373, 0.33133728],
               [0.25703342, 0.65797057, 0.33321113],
               [0.26550717, 0.66072943, 0.3352381],
               [0.2738829, 0.66347352, 0.33741504],
               [0.28214978, 0.66620584, 0.33973735],
               [0.29030037, 0.66892916, 0.34219931],
               [0.29833024, 0.67164589, 0.34479452],
               [0.30623756, 0.67435818, 0.34751622],
               [0.31401727, 0.67706862, 0.35035755],
               [0.32167446, 0.67977832, 0.35331149],
               [0.32921313, 0.68248841, 0.35637146],
               [0.33663762, 0.6851999, 0.35953132],
               [0.34394207, 0.68791541, 0.36278437],
               [0.35114051, 0.69063416, 0.36612568],
               [0.35823533, 0.69335721, 0.36954994],
               [0.36522563, 0.69608614, 0.37305153],
               [0.37212492, 0.69881984, 0.37662736],
               [0.37892642, 0.70156084, 0.38027132],
               [0.3856453, 0.70430747, 0.38398168],
               [0.39227735, 0.70706166, 0.38775317],
               [0.39883103, 0.70982284, 0.39158356],
               [0.40531204, 0.71259083, 0.39547048],
               [0.41171522, 0.7153677, 0.39940867],
               [0.41805337, 0.71815166, 0.40339835],
               [0.42432867, 0.72094309, 0.40743704],
               [0.43053587, 0.72374398, 0.41151964],
               [0.43668612, 0.72655264, 0.41564705],
               [0.44278167, 0.72936931, 0.41981738],
               [0.44882469, 0.73219416, 0.42402884],
               [0.45481188, 0.73502867, 0.42827737],
               [0.46075114, 0.73787164, 0.43256386],
               [0.46664458, 0.74072315, 0.43688699],
               [0.47249405, 0.74358334, 0.44124545],
               [0.47830134, 0.74645233, 0.44563802],
               [0.48406816, 0.74933022, 0.45006358],
               [0.48979527, 0.75221736, 0.45452056],
               [0.49548385, 0.75511394, 0.45900769],
               [0.50113714, 0.75801961, 0.46352502],
               [0.50675657, 0.76093444, 0.46807172],
               [0.51234353, 0.76385852, 0.47264701],
               [0.51789934, 0.76679191, 0.47725017],
               [0.52342527, 0.76973468, 0.48188052],
               [0.52892256, 0.77268689, 0.48653744],
               [0.53439237, 0.7756486, 0.49122035],
               [0.53983582, 0.77861987, 0.4959287],
               [0.54525401, 0.78160076, 0.500662],
               [0.55064796, 0.78459132, 0.50541978],
               [0.55601867, 0.78759161, 0.5102016],
               [0.56136709, 0.79060167, 0.51500705],
               [0.56669416, 0.79362156, 0.51983577],
               [0.57200073, 0.79665133, 0.5246874],
               [0.57728767, 0.79969103, 0.52956161],
               [0.5825556, 0.80274077, 0.53445795],
               [0.587804, 0.80580103, 0.53937491],
               [0.59303502, 0.80887141, 0.54431345],
               [0.59824938, 0.81195197, 0.54927334],
               [0.60344779, 0.81504274, 0.55425434],
               [0.60863092, 0.8181438, 0.55925625],
               [0.61379944, 0.82125518, 0.56427887],
               [0.61895395, 0.82437694, 0.56932201],
               [0.62409506, 0.82750913, 0.57438551],
               [0.62922216, 0.83065222, 0.5794679],
               [0.63433574, 0.83380631, 0.58456889],
               [0.63943758, 0.83697102, 0.58968966],
               [0.64452821, 0.84014641, 0.59483008],
               [0.64960814, 0.84333252, 0.59999004],
               [0.65467785, 0.84652942, 0.60516941],
               [0.65973664, 0.8497376, 0.61036667],
               [0.66478454, 0.8529573, 0.61558114],
               [0.66982361, 0.85618797, 0.62081462],
               [0.67485427, 0.85942969, 0.62606703],
               [0.67987695, 0.8626825, 0.6313383],
               [0.68489103, 0.86594688, 0.63662701],
               [0.68989598, 0.86922324, 0.64193178],
               [0.69489414, 0.87251091, 0.64725509],
               [0.69988588, 0.87580994, 0.65259689],
               [0.70487156, 0.87912041, 0.6579571],
               [0.70984902, 0.88244339, 0.6633321],
               [0.71482059, 0.88577817, 0.66872453],
               [0.71978715, 0.88912459, 0.67413514],
               [0.72474903, 0.89248272, 0.67956388],
               [0.72970443, 0.89585351, 0.68500748],
               [0.73465479, 0.89923659, 0.69046752],
               [0.73960143, 0.90263159, 0.69594546],
               [0.74454464, 0.90603859, 0.70144129],
               [0.74948217, 0.90945876, 0.70695082],
               [0.75441627, 0.91289135, 0.71247711],
               [0.75934783, 0.91633616, 0.71802108],
               [0.76427661, 0.91979348, 0.72358186],
               [0.76920074, 0.92326438, 0.72915567],
               [0.77412316, 0.92674772, 0.73474699],
               [0.77904414, 0.93024359, 0.74035579],
               [0.78396177, 0.93375306, 0.74597816],
               [0.78887747, 0.93727567, 0.75161605],
               [0.79379251, 0.94081106, 0.75727124],
               [0.79870604, 0.9443598, 0.76294165],
               [0.80361734, 0.94792244, 0.76862535],
               [0.80852873, 0.9514981, 0.77432618],
               [0.81344004, 0.95508703, 0.7800433],
               [0.81834917, 0.95869047, 0.78577205],
               [0.82325913, 0.96230716, 0.79151775],
               [0.82817006, 0.96593722, 0.79728024],
               [0.8330792, 0.96958222, 0.80305326],
               [0.83798987, 0.97324073, 0.80884306],
               [0.84290226, 0.97691284, 0.81464957]]
    for c in cm_data:
        c.append(1.0)
    cols = cols + cm_data
    precip_img = ax.pcolorfast(lons,
                               lats,
                               precip.data,
                               cmap=matplotlib.colors.ListedColormap(cols),
                               vmin=0,
                               vmax=1,
                               alpha=0.8,
                               zorder=200)