def load_cubes(self):
    """Use iris.load_cubes to read multiple netCDF files."""
    file_names = ['cube1.nc', 'cube2.nc']
    file_names = [os.path.join(self.data_dir, f) for f in file_names]
    iris.load_cubes(file_names, ['air_temperature', 'water_temperature'])
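# A minimal sketch of the load_cubes contract (the file name here is
# hypothetical): exactly one cube is returned per constraint, in the same
# order, so the result can be unpacked directly; any other match count
# raises iris.exceptions.ConstraintMismatchError.
import iris

air_temperature, water_temperature = iris.load_cubes(
    'example.nc', ['air_temperature', 'water_temperature'])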
def check_strattrop(infile):
    """
    Check if the model output is from a StratTrop (CheST) or TropIsop (CheT) run.

    Method: crude - checks for the following species in the file:
    ClO, N2O --> CheST; NOy (34004) --> CheT.
    Returns True for StratTrop/CheST.
    """
    stash_codes_str = ['m01s34i042', 'm01s34i049']   # ClO, N2O
    stc_trop = 'm01s34i004'                          # NO2/NOy

    # List of STASH attribute constraints, one per species.
    fieldcons = []
    for spc in stash_codes_str:
        fieldcons.append(iris.AttributeConstraint(STASH=spc))

    stcubes = iris.load_cubes(infile, constraints=fieldcons)
    if len(stcubes) == len(stash_codes_str):
        return True
    else:
        # Assume not StratTrop - confirm if Trop.
        fieldcons = iris.AttributeConstraint(STASH=stc_trop)
        trcube = iris.load_cube(infile, constraints=fieldcons)
        if trcube is not None:
            return False
        else:
            # Something wrong, not all diagnostics in place.
            print('CHK_STRATTROP: Diagnostics missing - not able to determine')
            return True  # Default - may cause failure in some scripts
def gen_or_load_2D(filename, data_functions, names, params={}, units='1',
                   **kwargs):
    '''Load data from filename if it exists, otherwise generate data and save it.

    Arguments:

    * filename: name of file to load from or write to
    * data_functions: list of functions to use to create 2D cubes
    * names: list of names to give each cube
    '''
    import os
    if not os.path.exists(filename):
        cubes = iris.cube.CubeList()
        for data_function, name in zip(data_functions, names):
            cube = gen_2D_cube_for_testing(data_function, **kwargs)
            cube.long_name = name
            cube.units = units
            cube.attributes.update(params)
            cubes.append(cube)
        iris.save(cubes, filename)
    else:
        cubes = iris.load_cubes(filename, names)
        for cube in cubes:
            assert params == register_params(cube.attributes)
            # assert cube.shape == CUBE_SHAPE
    return cubes
def test_landsea_unpacking_uses_dask(self):
    # Ensure that the graph of the (lazy) landsea-masked data contains an
    # explicit reference to a (lazy) landsea-mask field.
    # Otherwise its compute() will need to invoke another compute().
    # See https://github.com/SciTools/iris/issues/3237
    # This is too complex to explore in a mock-ist way, so let's load a
    # tiny bit of real data ...
    testfile_path = tests.get_data_path(
        ['FF', 'landsea_masked', 'testdata_mini_lsm.ff'])
    landsea_mask, soil_temp = iris.load_cubes(
        testfile_path, ('land_binary_mask', 'soil_temperature'))
    # Now check that the soil-temp dask graph correctly references the
    # landsea mask, in its dask graph.
    lazy_mask_array = landsea_mask.core_data()
    lazy_soildata_array = soil_temp.core_data()
    # Work out the main dask key for the mask data, as used by 'compute()'.
    mask_toplev_key = (lazy_mask_array.name,) + (0,) * lazy_mask_array.ndim
    # Get the 'main' calculation entry.
    mask_toplev_item = lazy_mask_array.dask[mask_toplev_key]
    # This should be a task (a simple fetch).
    self.assertTrue(callable(mask_toplev_item[0]))
    # Get the key (name) of the array that it fetches.
    mask_data_name = mask_toplev_item[1]
    # Check that the item this refers to is a PPDataProxy.
    self.assertIsInstance(lazy_mask_array.dask[mask_data_name],
                          pp.PPDataProxy)
    # Check that the soil-temp graph references the *same* lazy element,
    # showing that the mask+data calculation is handled by dask.
    self.assertIn(mask_data_name, lazy_soildata_array.dask.keys())
def main(fdir):
    fdirs = [os.path.join(fdir, path) for path in ['pressure/*', 'wind/*']]
    pressure, direction, wind = iris.load_cubes(
        fdirs, ['mslpressure', 'Direction', 'Windspeed'])

    # Turn these generators into lists, so that the animation can be repeated.
    pressure_slices = list(pressure.slices(['projection_y_coordinate',
                                            'projection_x_coordinate']))
    # Correct pressure units and convert to millibars.
    for slice in pressure_slices:
        slice.units = 'mb/10'
        slice.convert_units('mb')

    direction_slices = list(direction.slices(['projection_y_coordinate',
                                              'projection_x_coordinate']))
    wind_slices = list(wind.slices(['projection_y_coordinate',
                                    'projection_x_coordinate']))
    # Correct wind speed units and convert to m/s.
    for slice in wind_slices:
        slice.units = 'knot/10'
        slice.convert_units('m/s')

    frames = range(len(pressure_slices))
    fig = plt.gcf()
    ani = animation.FuncAnimation(fig, animation_plot, frames=frames,
                                  fargs=(pressure_slices, wind_slices,
                                         direction_slices),
                                  interval=200)
    print('saving...')
    ani.save('wind.avi', bitrate=5000)
    print('completed')
    plt.show()
def calc_merra(run):
    """Use MERRA as obs to compare."""
    # Load data
    merrafile = os.path.join(run['clim_root'], 'ERA-Interim_cubeList.nc')
    (t, q) = iris.load_cubes(merrafile,
                             ['air_temperature', 'specific_humidity'])
    # Strip out required times
    time = iris.Constraint(
        time=lambda cell:
        run['from_monthly'] <= cell.point <= run['to_monthly'])
    with iris.FUTURE.context(cell_datetime_objects=True):
        t = t.extract(time)
        q = q.extract(time)

    # zonal mean
    t_cds = [cdt.standard_name for cdt in t.coords()]
    if 'longitude' in t_cds:
        t = t.collapsed('longitude', iris.analysis.MEAN)
    q_cds = [cdt.standard_name for cdt in q.coords()]
    if 'longitude' in q_cds:
        q = q.collapsed('longitude', iris.analysis.MEAN)

    # mean over tropics
    equator = iris.Constraint(latitude=lambda lat: -10 <= lat <= 10)
    p100 = iris.Constraint(air_pressure=10000.)
    t = t.extract(equator & p100)

    # Calculate area-weighted global monthly means from multi-annual data
    iris.coord_categorisation.add_month(t, 'time', name='month')
    t = t.aggregated_by('month', iris.analysis.MEAN)
    if 'longitude' in t_cds:
        t = weight_lat_ave(t)
    else:
        t = weight_cosine(t)

    # Extract 10S-10N humidity at 100hPa
    tropics = iris.Constraint(latitude=lambda lat: -10 <= lat <= 10)
    p70 = iris.Constraint(air_pressure=7000.)
    q = q.extract(tropics & p70)

    # Calculate area-weighted global monthly means from multi-annual data
    iris.coord_categorisation.add_month(q, 'time', name='month')
    q = q.aggregated_by('month', iris.analysis.MEAN)
    if 'longitude' in q_cds:
        q = weight_lat_ave(q)
    else:
        q = weight_cosine(q)

    # Calculate time mean
    t = t.collapsed('time', iris.analysis.MEAN)
    q = q.collapsed('time', iris.analysis.MEAN)

    # Create return values
    tmerra = t.data  # K
    # TODO magic numbers
    qmerra = ((1000000. * 29. / 18.) * q.data)  # ppmv
    return tmerra, qmerra
def main():
    fname = iris.sample_data_path("colpex.pp")

    # The list of phenomena of interest
    phenomena = ["air_potential_temperature", "air_pressure"]

    # Define the constraint on standard name and model level
    constraints = [
        iris.Constraint(phenom, model_level_number=1) for phenom in phenomena
    ]
    air_potential_temperature, air_pressure = iris.load_cubes(
        fname, constraints
    )

    # Define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(1000, long_name="P0", units="hPa")
    # Convert reference pressure 'p0' into the same units as 'air_pressure'
    p0.convert_units(air_pressure.units)

    # Calculate Exner pressure
    exner_pressure = (air_pressure / p0) ** (287.05 / 1005.0)
    # Set the name (the unit is scalar)
    exner_pressure.rename("exner_pressure")

    # Calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # Set the name (the unit is K)
    air_temperature.rename("air_temperature")

    # Now create an iterator which will give us lat lon slices of
    # exner pressure and air temperature in the form
    # (exner_slice, air_temp_slice).
    lat_lon_slice_pairs = iris.iterate.izip(
        exner_pressure,
        air_temperature,
        coords=["grid_latitude", "grid_longitude"],
    )

    # For the purposes of this example, we only want to demonstrate the first
    # plot.
    lat_lon_slice_pairs = [next(lat_lon_slice_pairs)]

    plt.figure(figsize=(8, 4))
    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)

        # The default colorbar has a few too many ticks on it, causing text to
        # overlap. Therefore, limit the number of ticks.
        limit_colorbar_ticks(cont)

        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)

    iplt.show()
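# Note on the calculation above: it applies the standard Exner relation
# pi = (p / p0) ** (R_d / c_p) followed by T = pi * theta, where
# R_d ~= 287.05 J kg-1 K-1 and c_p ~= 1005 J kg-1 K-1 for dry air -
# hence the hard-coded exponent 287.05 / 1005.0.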
def main():
    fname = iris.sample_data_path('colpex.pp')

    # The list of phenomena of interest
    phenomena = ['air_potential_temperature', 'air_pressure']

    # Define the constraint on standard name and model level
    constraints = [iris.Constraint(phenom, model_level_number=1)
                   for phenom in phenomena]
    air_potential_temperature, air_pressure = iris.load_cubes(fname,
                                                              constraints)

    # Define a coordinate which represents 1000 hPa
    p0 = coords.AuxCoord(1000, long_name='P0', units='hPa')
    # Convert reference pressure 'p0' into the same units as 'air_pressure'
    p0.convert_units(air_pressure.units)

    # Calculate Exner pressure
    exner_pressure = (air_pressure / p0) ** (287.05 / 1005.0)
    # Set the name (the unit is scalar)
    exner_pressure.rename('exner_pressure')

    # Calculate air_temp
    air_temperature = exner_pressure * air_potential_temperature
    # Set the name (the unit is K)
    air_temperature.rename('air_temperature')

    # Now create an iterator which will give us lat lon slices of
    # exner pressure and air temperature in the form
    # (exner_slice, air_temp_slice).
    lat_lon_slice_pairs = iris.iterate.izip(exner_pressure,
                                            air_temperature,
                                            coords=['grid_latitude',
                                                    'grid_longitude'])

    plt.figure(figsize=(8, 4))
    for exner_slice, air_temp_slice in lat_lon_slice_pairs:
        plt.subplot(121)
        cont = qplt.contourf(exner_slice)

        # The default colorbar has a few too many ticks on it, causing text to
        # overlap. Therefore, limit the number of ticks.
        limit_colorbar_ticks(cont)

        plt.subplot(122)
        cont = qplt.contourf(air_temp_slice)
        limit_colorbar_ticks(cont)

        plt.show()

        # For the purposes of this example, break after the first loop - we
        # only want to demonstrate the first plot.
        break
def test_load_cubes(self):
    flds = self.fields(c_h='0123')
    file = self.save_fieldcubes(flds)
    height_constraints = [
        iris.Constraint(height=300.0),
        iris.Constraint(height=lambda h: 150.0 < h < 350.0),
        iris.Constraint('air_temperature')]
    results = iris.load_cubes(file, height_constraints)
    expected = CubeList([flds[2],
                         CubeList(flds[1:3]).merge_cube(),
                         CubeList(flds).merge_cube()])
    self.assertEqual(results, expected)
def load_cube(paths, variable_name=None):
    """Read datasets from paths into Iris cubes.

    Combines cubes if there is more than one dataset in the same file.

    Returns a single concatenated cube, or None if no cube with at least
    three coordinates (time, lat, long, height) is found.
    """
    if isinstance(paths, (str, pathlib.Path)):
        if variable_name:
            cubes = iris.load_cubes(str(paths), constraints=variable_name)
        else:
            cubes = iris.load_cubes(str(paths))
    else:
        if variable_name:
            cubes = iris.load([str(path) for path in paths],
                              constraints=variable_name)
        else:
            cubes = iris.load([str(path) for path in paths])

    # Select only the cubes with 3/4D data (time, lat, long, height)
    cubes = iris.cube.CubeList(
        [cube for cube in cubes if len(cube.coords()) >= 3])

    if len(cubes) == 0:
        return None

    equalise_attributes(cubes)
    unify_time_units(cubes)
    try:
        cube = cubes.concatenate_cube()
    except iris.exceptions.ConcatenateError as exc:
        logger.warning("%s for %s", exc, str(paths))
        logger.warning("Using only the first cube of [%s]", cubes)
        # iris.load always returns a cubelist, so just take the first element
        cube = cubes[0]
    return cube
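# Hypothetical usage sketch of the helper above (file name and variable name
# are illustrative only): a single path yields one concatenated cube, or None.
temperature_cube = load_cube('model_output.nc',
                             variable_name='air_temperature')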
def test_hh_round_trip(self):
    filepath = self.get_testdata_path('faked_sample_hh_grib_data.grib2')
    # Load and save temperature cube and reference (orography) cube
    # separately because this is the only way to save the hybrid height
    # coordinate.
    cube, ref_cube = load_cubes(filepath,
                                ('air_temperature', 'surface_altitude'))
    with self.temp_filename() as tmp_save_path:
        save([cube, ref_cube], tmp_save_path, saver='grib2')
        # Only need to reload temperature cube to compare with unsaved
        # temperature cube.
        saved_cube = load_cube(tmp_save_path, 'air_temperature')
        self.assertTrue(saved_cube == cube)
def filespecs_cubes(filespecs, raw, constraints, callback=None):
    # Return cubes from filespecs.
    for filespec in filespecs:
        print("filespec:", filespec)
        # Return cubes from filespec. Could be multiple files and cubes.
        for filename in glob.iglob(filespec):
            if filename != filespec:
                print("  filename:", filename)
            if raw is True:
                cubes = iris.load_raw(filename, constraints=constraints,
                                      callback=callback)
            else:
                cubes = iris.load_cubes(filename, constraints=constraints,
                                        callback=callback)
            for cube in cubes:
                yield cube
def calc_erai(run):
    """Use ERA-Interim as obs to compare."""
    # Load data
    eraifile = os.path.join(run['clim_root'], 'ERA-Interim_cubeList.nc')
    (t, q) = iris.load_cubes(eraifile,
                             ['air_temperature', 'specific_humidity'])
    # Strip out required times
    time = iris.Constraint(
        time=lambda cell:
        run['from_monthly'] <= cell.point <= run['to_monthly'])
    t = t.extract(time)
    q = q.extract(time)

    # Calculate time mean
    t = t.collapsed('time', iris.analysis.MEAN)
    q = q.collapsed('time', iris.analysis.MEAN)

    # Create return values
    terai = t.data  # K
    qerai = ((1000000. * 29. / 18.) * q.data)  # ppmv
    return terai, qerai
def test_hybrid_pressure(self):
    filepath = self.get_testdata_path('faked_sample_hp_grib_data.grib2')
    # Load and save temperature cube and reference (air_pressure at
    # surface) cube separately because this is the only way to save the
    # hybrid pressure coordinate.
    cube, ref_cube = load_cubes(filepath,
                                ('air_temperature', 'air_pressure'))
    with self.temp_filename() as tmp_save_path:
        save([cube, ref_cube], tmp_save_path, saver='grib2')
        # Only need to reload temperature cube to compare with unsaved
        # temperature cube.
        saved_cube = load_cube(tmp_save_path, 'air_temperature')
        # Currently all attributes are lost when saving to grib, so we must
        # equalise them in order to successfully compare all other aspects.
        equalise_attributes([saved_cube, cube])
        self.assertTrue(saved_cube == cube)
def get_data(self, gfs_vars, units=None, delta=None, apply_domain=True):
    if delta is None:
        timestamp = self.fct_timestamp
    else:
        if not isinstance(delta, dt.timedelta):
            raise ValueError("Expecting datetime.timedelta object")
        ts = dt.datetime.strptime(self.fct_timestamp, "%Y%m%d%H") + delta
        timestamp = ts.strftime("%Y%m%d%H")

    # Load data
    with warnings.catch_warnings():
        # Suppress warnings
        warnings.simplefilter("ignore")
        cubes = iris.load_cubes(self.get_file_path(timestamp), gfs_vars)

    rtn = iris.cube.CubeList()
    for c in cubes:
        # Rewrap longitude
        c = c.intersection(longitude=(-180, 180))
        if units is not None:
            # Convert units
            c.convert_units(units)
        # Constrain to specified forecast hour for this chart
        fct_date = dt.datetime.strptime(c.attributes['initial_time'],
                                        '%m/%d/%Y (%H:%M)')
        time_constraint = gfs_utils.get_time_constraint(fct_date,
                                                        self.fct_hour)
        c = c.extract(time_constraint)
        if apply_domain:
            # Constrain to specified domain
            domain_constraint = gfs_utils.get_domain_constraint(self.domain)
            c = c.extract(domain_constraint)
        rtn.append(c)

    return rtn if len(rtn) > 1 else rtn[0]
def test_path_object(self):
    paths = (pathlib.Path(
        tests.get_data_path(["PP", "aPPglob1", "global.pp"])),)
    cubes = iris.load_cubes(paths)
    self.assertEqual(len(cubes), 1)
def test_normal(self):
    paths = (tests.get_data_path(["PP", "aPPglob1", "global.pp"]),)
    cubes = iris.load_cubes(paths)
    self.assertEqual(len(cubes), 1)
def test_not_enough(self):
    paths = (tests.get_data_path(["PP", "aPPglob1", "global.pp"]),)
    with self.assertRaises(iris.exceptions.ConstraintMismatchError):
        iris.load_cubes(paths, "wibble")
def test_not_enough_multi(self):
    paths = (tests.get_data_path(['PP', 'aPPglob1', 'global.pp']),)
    with self.assertRaises(iris.exceptions.ConstraintMismatchError):
        iris.load_cubes(paths, ('air_temperature', 'wibble'))
# Import the dependencies
from __future__ import unicode_literals
import matplotlib.pyplot as plt
import iris
import iris.plot as iplt
import iris.quickplot as qplt
import netCDF4
from datetime import datetime, timedelta
import os, sys, string
import numpy as np

_author_ = 'Ueslei Adriano Sutil'
_email_ = '*****@*****.**'
_created_ = datetime(2017, 3, 20)
_modified_ = datetime(2017, 3, 20)
_version_ = "0.1.0"
_status_ = "Development"

# Open the file and load the variables.
nc = '/home/uesleisutil/Documentos/python_scripts/myocean_anita.nc'
theta = iris.load_cubes(nc)
#    ani.save(ani_path, writer='ffmpeg')
#
#    if show_ani:
#        print 'Showing...'
#        ani = animation.ArtistAnimation(
#            figure, per_image_artists,
#            interval=250, repeat=True, repeat_delay=5000,
##            blit=True
#        )
#        plt.show(block=True)


if __name__ == '__main__':
    # simpletest()

    # get some basic data
    airtemp_data, precip_data = iris.load_cubes(
        '/data/local/dataZoo/PP/decadal/*.pp',
        ['air_temperature', 'precipitation_flux'])

    # create a rolling map from these
    n_frames = 12
    # i_images = [int(x) for x in np.linspace(0, airtemp_raw.shape[0],
    #                                         n_frames, endpoint=False)]
    # airtemp_data = airtemp_raw[i_images]
    airtemp_data = airtemp_data[0:n_frames+1]
    precip_data = precip_data[0:n_frames+1]
    units_degC = iris.unit.Unit('degC')
    airtemp_data.data = airtemp_data.units.convert(airtemp_data.data,
                                                   units_degC)
    airtemp_data.units = units_degC
    rotating_sequence(airtemp_cubes=airtemp_data,
                      precip_cubes=precip_data,
                      n_steps_round=airtemp_data.shape[0])

#>>> for x in pf:
#...     plt.clf()
#...     plt.axes(projection=ccrs.PlateCarree()); plt.gca().stock_img()
import glob

import iris
import iris.coords as coords
import iris.coord_categorisation
from iris.analysis.interpolate import linear
import cartopy.crs as ccrs

diagnostic = '30181.pp'
flist = glob.glob('/projects/cascade/pwille/moose_retrievals/*/*/%s'
                  % diagnostic)

for i in flist:
    fname = str(i)
    l_s_r_rate, t_tot_incr = iris.load_cubes(
        fname, ['stratiform_rainfall_rate', 'tendency_of_air_temperature'])
    experiment_id = fname.split('/')[6]

    # iris.coord_categorisation.add_day_of_year(p_at_msl,
    #     'forecast_reference_time', name='dayyear')
    # forecast_period messes up aggregation sometimes so remove. Probably need
    # to comment out for time of day.
    # http://nbviewer.ipython.org/github/SciTools/iris_example_code/blob/master/coord_categorisation.ipynb

    # Because some model outputs have time as a 2-D aux coord, as opposed to a
    # 1-D dim coord, the standard iris categorisation by day, year etc throws
    # an error. Add_categorised_coord allows categorisation of 2-dimensional
    # arrays.

    # Get year from time coord. Function to use in add_categorised_coord below
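# The helper referred to in the last comment is not included in this excerpt;
# as a hedged sketch (the name and usage are illustrative assumptions), a
# year-categorisation function for add_categorised_coord could look like this:
def get_year(coord, point):
    # Convert a time point to its calendar year using the coord's units.
    return coord.units.num2date(point).year

# e.g. iris.coord_categorisation.add_categorised_coord(
#          l_s_r_rate, 'year', l_s_r_rate.coord('time'), get_year)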