def add_extra_time_coords(cube):
    """
    Adds new coordinate for indexing a given simulation based on model and
    ensemble and adds additional time coordinates for unit manipulation.

    Each auxiliary time coordinate is only added if the cube does not
    already carry a coordinate of that name.
    """
    # (coordinate name, iris categorisation helper) pairs, applied in order.
    time_categorisations = (
        ('year', icc.add_year),
        ('month', icc.add_month),
        ('month_number', icc.add_month_number),
        ('day_of_month', icc.add_day_of_month),
        ('hour', icc.add_hour),
    )
    for coord_name, add_coord in time_categorisations:
        if not cube.coords(coord_name):
            add_coord(cube, 'time')
    return cube
def test_basic(self):
    """Exercise every coord_categorisation helper on the shared test cube
    and compare the resulting cube against the stored CML golden file."""
    cube = self.cube
    time_coord = self.time_coord

    # Each helper adds an auxiliary coordinate under an explicit custom name.
    ccat.add_year(cube, time_coord, 'my_year')
    ccat.add_day_of_month(cube, time_coord, 'my_day_of_month')
    ccat.add_day_of_year(cube, time_coord, 'my_day_of_year')

    ccat.add_month(cube, time_coord, 'my_month')
    # NOTE(review): the record=True context suppresses warnings emitted by
    # this helper — presumably a deprecation warning; confirm against the
    # iris version under test.
    with warnings.catch_warnings(record=True):
        ccat.add_month_shortname(cube, time_coord, 'my_month_shortname')
    ccat.add_month_fullname(cube, time_coord, 'my_month_fullname')
    ccat.add_month_number(cube, time_coord, 'my_month_number')

    ccat.add_weekday(cube, time_coord, 'my_weekday')
    ccat.add_weekday_number(cube, time_coord, 'my_weekday_number')
    with warnings.catch_warnings(record=True):
        ccat.add_weekday_shortname(cube, time_coord, 'my_weekday_shortname')
    ccat.add_weekday_fullname(cube, time_coord, 'my_weekday_fullname')

    ccat.add_season(cube, time_coord, 'my_season')
    ccat.add_season_number(cube, time_coord, 'my_season_number')
    with warnings.catch_warnings(record=True):
        ccat.add_season_month_initials(cube, time_coord, 'my_season_month_initials')
    ccat.add_season_year(cube, time_coord, 'my_season_year')

    # also test 'generic' categorisation interface
    def _month_in_quarter(coord, pt_value):
        # Zero-based month index within its calendar quarter (0, 1 or 2).
        date = coord.units.num2date(pt_value)
        return (date.month - 1) % 3

    ccat.add_categorised_coord(cube, 'my_month_in_quarter', time_coord, _month_in_quarter)

    # To ensure consistent results between 32-bit and 64-bit
    # platforms, ensure all the numeric categorisation coordinates
    # are always stored as int64.
    for coord in cube.coords():
        if coord.long_name is not None and coord.points.dtype.kind == 'i':
            coord.points = coord.points.astype(np.int64)

    # check values
    self.assertCML(cube, ('categorisation', 'quickcheck.cml'))
def test_basic(self):
    """Build a synthetic time-series cube and apply every categorisation
    helper, then compare the cube against the stored CML golden file."""
    # make a series of 'day numbers' for the time, that slide across
    # month boundaries (step of 27 days, ~600 days total)
    day_numbers = np.arange(0, 600, 27, dtype=np.int32)

    cube = iris.cube.Cube(day_numbers, long_name='test cube', units='metres')

    # use day numbers as data values also (don't actually use this for anything)
    cube.data = day_numbers

    time_coord = iris.coords.DimCoord(
        day_numbers, standard_name='time',
        units=iris.unit.Unit('days since epoch', 'gregorian'))
    cube.add_dim_coord(time_coord, 0)

    # add test coordinates for examples wanted
    ccat.add_year(cube, time_coord)
    # NB test passing coord-name instead of coord itself
    ccat.add_day_of_month(cube, 'time')
    ccat.add_month(cube, time_coord)
    ccat.add_month_shortname(cube, time_coord, name='month_short')
    ccat.add_month_fullname(cube, time_coord, name='month_full')
    ccat.add_month_number(cube, time_coord, name='month_number')
    ccat.add_weekday(cube, time_coord)
    ccat.add_weekday_number(cube, time_coord, name='weekday_number')
    ccat.add_weekday_shortname(cube, time_coord, name='weekday_short')
    ccat.add_weekday_fullname(cube, time_coord, name='weekday_full')
    ccat.add_season(cube, time_coord)
    ccat.add_season_number(cube, time_coord, name='season_number')
    ccat.add_season_month_initials(cube, time_coord, name='season_months')
    ccat.add_season_year(cube, time_coord, name='year_ofseason')

    # also test 'generic' categorisation interface
    def _month_in_quarter(coord, pt_value):
        # Zero-based month index within its calendar quarter (0, 1 or 2).
        date = coord.units.num2date(pt_value)
        return (date.month - 1) % 3

    ccat.add_categorised_coord(cube, 'month_in_quarter', time_coord,
                               _month_in_quarter)

    # Force all numeric categorisation coords to int64 so the CML
    # comparison is identical on 32-bit and 64-bit platforms.
    for coord_name in ['month_number', 'month_in_quarter', 'weekday_number',
                       'season_number', 'year_ofseason', 'year', 'day']:
        cube.coord(coord_name).points = \
            cube.coord(coord_name).points.astype(np.int64)

    # check values
    self.assertCML(cube, ('categorisation', 'quickcheck.cml'))
def test_basic(self):
    """Exercise the coord_categorisation helpers on the shared test cube
    and compare the resulting cube against the stored CML golden file."""
    cube = self.cube
    time_coord = self.time_coord

    # Each helper adds an auxiliary coordinate under an explicit custom name.
    ccat.add_year(cube, time_coord, "my_year")
    ccat.add_day_of_month(cube, time_coord, "my_day_of_month")
    ccat.add_day_of_year(cube, time_coord, "my_day_of_year")

    ccat.add_month(cube, time_coord, "my_month")
    ccat.add_month_fullname(cube, time_coord, "my_month_fullname")
    ccat.add_month_number(cube, time_coord, "my_month_number")

    ccat.add_weekday(cube, time_coord, "my_weekday")
    ccat.add_weekday_number(cube, time_coord, "my_weekday_number")
    ccat.add_weekday_fullname(cube, time_coord, "my_weekday_fullname")

    ccat.add_season(cube, time_coord, "my_season")
    ccat.add_season_number(cube, time_coord, "my_season_number")
    ccat.add_season_year(cube, time_coord, "my_season_year")

    # also test 'generic' categorisation interface
    def _month_in_quarter(coord, pt_value):
        # Zero-based month index within its calendar quarter (0, 1 or 2).
        date = coord.units.num2date(pt_value)
        return (date.month - 1) % 3

    ccat.add_categorised_coord(
        cube, "my_month_in_quarter", time_coord, _month_in_quarter
    )

    # To ensure consistent results between 32-bit and 64-bit
    # platforms, ensure all the numeric categorisation coordinates
    # are always stored as int64.
    for coord in cube.coords():
        if coord.long_name is not None and coord.points.dtype.kind == "i":
            coord.points = coord.points.astype(np.int64)

    # check values
    self.assertCML(cube, ("categorisation", "quickcheck.cml"))
def draw_for_date():
    """Plot the SST field for 1 July 1958 and save it to NEMO_IMAGES_DIR."""
    sst = iris.load(T_FILE_PATH)[1]

    # Add month/day categorisations so the date can be selected by constraint.
    coord_categorisation.add_month_number(sst, "time")
    coord_categorisation.add_day_of_month(sst, "time")

    date_constraint = iris.Constraint(month_number=7, day_of_month=1)
    sst_sel = sst.extract(date_constraint)
    # Mask exact zeros before plotting — presumably land points; verify.
    sst_sel.data = np.ma.masked_where(sst_sel.data == 0, sst_sel.data)

    b, lons, lats = nemo_commons.get_basemap_and_coordinates_from_file(T_FILE_PATH)

    # Plot the data
    fig = plt.figure()
    x, y = b(lons, lats)
    b.colorbar(b.pcolormesh(x, y, sst_sel.data))
    b.drawcoastlines()

    fname = "sst_1july_1958.jpeg"
    if not os.path.isdir(NEMO_IMAGES_DIR):
        os.mkdir(NEMO_IMAGES_DIR)
    fig.savefig(os.path.join(NEMO_IMAGES_DIR, fname))
def draw_for_date():
    """Render sea-surface temperature for 1 July 1958 and write a JPEG."""
    cubes = iris.load(T_FILE_PATH)
    sst = cubes[1]

    # Categorise time so a single calendar date can be extracted.
    coord_categorisation.add_month_number(sst, "time")
    coord_categorisation.add_day_of_month(sst, "time")

    sst_sel = sst.extract(iris.Constraint(month_number=7, day_of_month=1))
    # Mask exact zeros before plotting — presumably land points; verify.
    sst_sel.data = np.ma.masked_where(sst_sel.data == 0, sst_sel.data)

    bmap, lons, lats = nemo_commons.get_basemap_and_coordinates_from_file(
        T_FILE_PATH)

    # Plot the data
    fig = plt.figure()
    x, y = bmap(lons, lats)
    mesh = bmap.pcolormesh(x, y, sst_sel.data)
    bmap.colorbar(mesh)
    bmap.drawcoastlines()

    if not os.path.isdir(NEMO_IMAGES_DIR):
        os.mkdir(NEMO_IMAGES_DIR)
    fig.savefig(os.path.join(NEMO_IMAGES_DIR, "sst_1july_1958.jpeg"))
def read_pr_sm_topo(project_info, model):
    """
    ;; Arguments
    ;;    project_info: dictionary
    ;;            all info from namelist
    ;;
    ;; Return
    ;;    pr: iris cube [time, lat, lon]
    ;;            precipitation time series
    ;;    sm: iris cube [time, lat, lon]
    ;;            soil moisture time series
    ;;    topo: array [lat, lon]
    ;;            topography
    ;;    lon: array [lon]
    ;;            longitude
    ;;    lat: array [lat]
    ;;            latitude
    ;;    time: iris cube coords
    ;;            time info of cube
    ;;    time_bnds_1: float
    ;;            first time_bnd of time series
    ;;
    ;;
    ;; Description
    ;;    Read cmip5 input data for computing the diagnostic
    ;;
    """
    import projects

    E = ESMValProject(project_info)
    verbosity = E.get_verbosity()

    #-------------------------
    # Read model info
    #-------------------------
    # Instantiate the project class named by the first model entry.
    currProject = getattr(vars()['projects'], model.split_entries()[0])()

    model_info = model.split_entries()

    mip = currProject.get_model_mip(model)
    exp = currProject.get_model_exp(model)
    start_year = currProject.get_model_start_year(model)
    end_year = currProject.get_model_end_year(model)
    years = range(int(start_year), int(end_year) + 1)

    # NOTE(review): the triple-quoted string below is dead code kept as
    # history of the previous namelist-free reading path.
    '''
    #-------------------------
    # Read model info
    #-------------------------
    model_name = model_info[1]
    time_step = model_info[2]
    exp_fam = model_info[3]
    model_run = model_info[4]
    year_start = model_info[5]
    year_end = model_info[6]
    filedir = model_info[7]
    years = range(int(year_start), int(year_end)+1)
    '''

    #-------------------------
    # Input data directories
    #-------------------------
    currDiag = project_info['RUNTIME']['currDiag']

    pr_index = currDiag.get_variables().index('pr')
    pr_field = currDiag.get_field_types()[pr_index]

    sm_index = currDiag.get_variables().index('mrsos')
    sm_field = currDiag.get_field_types()[sm_index]

    indir = currProject.get_cf_outpath(project_info, model)
    in_file = currProject.get_cf_outfile(project_info, model,
                                         pr_field, 'pr', mip, exp)
    pr_files = [os.path.join(indir, in_file)]

    in_file = currProject.get_cf_outfile(project_info, model,
                                         sm_field, 'mrsos', mip, exp)
    sm_files = [os.path.join(indir, in_file)]

    # NOTE(review): dead code retained from the glob-based file discovery.
    '''
    #-------------------------
    # Input data directories
    #-------------------------
    pr_files = []
    sm_files = []
    for yy in years:
        Patt = filedir+'pr_'+time_step+'_'+model_name+'_'+exp_fam+'_'+\
            model_run+'_'+str(yy)+'*.nc'
        pr_files.append(glob.glob(Patt))
        Patt = filedir+'mrsos_'+time_step+'_'+model_name+'_'+exp_fam+'_'+\
            model_run+'_'+str(yy)+'*.nc'
        sm_files.append(glob.glob(Patt))
    pr_files = [l[0] for l in pr_files if len(l)>0]
    pr_files = sorted(pr_files)
    sm_files = [l[0] for l in sm_files if len(l)>0]
    sm_files = sorted(sm_files)
    '''

    #----------------------
    # Read in precipitation
    #----------------------
    pr_list = []
    for pr_file in pr_files:
        info('Reading precipitation from ' + pr_file,
             verbosity, required_verbosity=1)
        pr = iris.load(pr_file)[0]
        # Strip all attributes so the cubes can be concatenated.
        # NOTE(review): popping while iterating over .keys() is only safe on
        # Python 2 (list of keys); on Python 3 this would raise RuntimeError.
        for at_k in pr.attributes.keys():
            pr.attributes.pop(at_k)
        pr_list.append(pr)
    pr = iris.cube.CubeList(pr_list)
    pr = pr.concatenate()[0]

    # Convert longitude from 0_360 to -180_180
    pr = coord_change([pr])[0]

    # Add metadata: day, month, year
    add_month(pr, 'time')
    add_day_of_month(pr, 'time', name='dom')
    add_year(pr, 'time')

    # Convert units to kg m-2 hr-1
    pr.convert_units('kg m-2 hr-1')

    #-----------------------
    # Read in soil moisture
    #-----------------------
    sm_list = []
    for sm_file in sm_files:
        info('Reading soil moisture from ' + sm_file,
             verbosity, required_verbosity=1)
        sm = iris.load(sm_file)[0]
        # Strip all attributes so the cubes can be concatenated (see note above).
        for at_k in sm.attributes.keys():
            sm.attributes.pop(at_k)
        sm_list.append(sm)
    sm = iris.cube.CubeList(sm_list)
    sm = sm.concatenate()[0]

    # Convert longitude from 0_360 to -180_180
    sm = coord_change([sm])[0]

    # Add metadata: day, month, year
    add_month(sm, 'time')
    add_day_of_month(sm, 'time', name='dom')
    add_year(sm, 'time')

    #----------------------------------------------
    # Constrain pr and sm data to latitude 60S_60N
    #----------------------------------------------
    latconstraint = iris.Constraint(latitude=lambda cell: -59.0 <= cell <= 59.0)

    pr = pr.extract(latconstraint)
    sm = sm.extract(latconstraint)

    #---------------------------------------------------
    # Read in grid info: latitude, longitude, timestamp
    #---------------------------------------------------
    lon = sm.coords('longitude')[0].points
    lat = sm.coords('latitude')[0].points
    time = sm.coords('time')

    # --------------------------------------
    # Convert missing data (if any) to -999.
    # --------------------------------------
    # NOTE(review): bare except deliberately treats "no mask present" as
    # "nothing to convert", but it also hides any other failure — consider
    # narrowing to AttributeError.
    try:
        sm.data.set_fill_value(-999)
        sm.data.data[sm.data.mask] = -999.
    except:
        info('no missing data conversion', verbosity, required_verbosity=1)

    #----------------------
    # Read in topography
    #----------------------
    # Topography map specs:
    # latitude 60S_60N
    # longitude 180W_180E
    # model resolution
    #ftopo = currProject.get_cf_fx_file(project_info, model)
    #dt = '>f4'
    #topo = (np.fromfile(ftopo, dtype=dt)).reshape(len(lat), len(lon))
    topo = get_topo(project_info, lon, lat, model)

    #----------------------
    # Read in time bounds
    #----------------------
    indir, infiles = currProject.get_cf_infile(project_info, model,
                                               pr_field, 'pr', mip, exp)
    Patt = os.path.join(indir, infiles)
    pr_files = sorted(glob.glob(Patt))
    ncf = nc4.Dataset(pr_files[0])
    # Fractional part of the first time bound of the first file.
    time_bnds_1 = ncf.variables['time_bnds'][0][0]
    time_bnds_1 = time_bnds_1 - int(time_bnds_1)
    ncf.close()

    #-----------------------------------------------
    # Return input data to compute sm_pr diagnostic
    #-----------------------------------------------
    return pr, sm, topo, lon, lat, time, time_bnds_1
def read_pr_sm_topo(filedir, years):
    """
    ;; Arguments
    ;;    filedir: dir
    ;;            directory with input data
    ;;    years: list of int
    ;;            list of years for the analysis
    ;;
    ;; Return
    ;;    pr: iris cube [time, lat, lon]
    ;;            precipitation time series
    ;;    sm: iris cube [time, lat, lon]
    ;;            soil moisture time series
    ;;    topo: array [lat, lon]
    ;;            topography
    ;;    lon: array [lon]
    ;;            longitude
    ;;    lat: array [lat]
    ;;            latitude
    ;;    time: iris cube coords
    ;;            time info of cube
    ;;
    ;;
    ;; Description
    ;;    Read cmip5 input data for computing the diagnostic
    ;;
    """
    # NOTE(review): this variant is Python 2 (print statements) and has the
    # inmcm4/amip file naming hard-coded.

    #-------------------------
    # Input data directories
    #-------------------------
    # One file per year, with the date range embedded in the file name.
    Patt = filedir + 'pr_3hr_inmcm4_amip_r1i1p1_{}010101-{}123122.nc'
    pr_files = [Patt.format(y, y) for y in years]
    Patt = filedir + 'mrsos_3hr_inmcm4_amip_r1i1p1_{}010100-{}123121.nc'
    sm_files = [Patt.format(y, y) for y in years]

    #----------------------
    # Read in precipitation
    #----------------------
    pr_list = []
    for pr_file in pr_files:
        print 'Reading precipitation from ' + pr_file
        pr = iris.load(pr_file)[0]
        # Strip all attributes so the yearly cubes can be concatenated.
        for at_k in pr.attributes.keys():
            pr.attributes.pop(at_k)
        pr_list.append(pr)
    pr = iris.cube.CubeList(pr_list)
    pr = pr.concatenate()[0]

    # Convert longitude from 0_360 to -180_180
    pr = coord_change([pr])[0]

    # Add metadata: day, month, year
    add_month(pr, 'time')
    add_day_of_month(pr, 'time', name='dom')
    add_year(pr, 'time')

    # Convert units to kg m-2 hr-1
    pr.convert_units('kg m-2 hr-1')

    #-----------------------
    # Read in soil moisture
    #-----------------------
    sm_list = []
    for sm_file in sm_files:
        print 'Reading soil moisture from ' + sm_file
        sm = iris.load(sm_file)[0]
        # Strip all attributes so the yearly cubes can be concatenated.
        for at_k in sm.attributes.keys():
            sm.attributes.pop(at_k)
        sm_list.append(sm)
    sm = iris.cube.CubeList(sm_list)
    sm = sm.concatenate()[0]

    # Convert longitude from 0_360 to -180_180
    sm = coord_change([sm])[0]

    # Add metadata: day, month, year
    add_month(sm, 'time')
    add_day_of_month(sm, 'time', name='dom')
    add_year(sm, 'time')

    #----------------------------------------------
    # Constrain pr and sm data to latitude 60S_60N
    #----------------------------------------------
    latconstraint = iris.Constraint(
        latitude=lambda cell: -59.0 <= cell <= 59.0)

    pr = pr.extract(latconstraint)
    sm = sm.extract(latconstraint)

    #---------------------------------------------------
    # Read in grid info: latitude, longitude, timestamp
    #---------------------------------------------------
    lon = sm.coords('longitude')[0].points
    lat = sm.coords('latitude')[0].points
    time = sm.coords('time')

    # --------------------------------------
    # Convert missing data (if any) to -999.
    # --------------------------------------
    # NOTE(review): bare except treats "no mask present" as "nothing to
    # convert", but also hides other failures — consider AttributeError.
    try:
        sm.data.set_fill_value(-999)
        sm.data.data[sm.data.mask] = -999.
    except:
        print 'no missing data conversion'

    #----------------------
    # Read in topography
    #----------------------
    # Topography map specs:
    # latitude 60S_60N
    # longitude 180W_180E
    # model resolution
    ftopo = filedir + 'topo_var_5x5_inmcm4.gra'
    # Big-endian float32 flat binary, reshaped onto the model grid.
    dt = '>f4'
    topo = (np.fromfile(ftopo, dtype=dt)).reshape(len(lat), len(lon))

    #-----------------------------------------------
    # Return input data to compute sm_pr diagnostic
    #-----------------------------------------------
    return pr, sm, topo, lon, lat, time
def add_time_coord_cats(cube):
    """
    This function takes in an iris cube, and adds a range of
    numeric co-ordinate categorisations to it. Depending on the data,
    not all of the coords added will be relevant.

    args
    ----
    cube: iris cube that has a coordinate called 'time'

    Returns
    -------
    Cube: cube that has new time categorisation coords added

    Notes
    -----
    test
    A simple example:

    >>> file = os.path.join(conf.DATA_DIR, 'mslp.daily.rcm.viet.nc')
    >>> cube = iris.load_cube(file)
    >>> coord_names = [coord.name() for coord in cube.coords()]
    >>> print((', '.join(coord_names)))
    time, grid_latitude, grid_longitude
    >>> ccube = add_time_coord_cats(cube)
    >>> coord_names = [coord.name() for coord in ccube.coords()]
    >>> print((', '.join(coord_names)))
    time, grid_latitude, grid_longitude, day_of_month, day_of_year, month, \
month_number, season, season_number, year
    >>> # print every 50th value of the added time cat coords
    ... for c in coord_names[3:]:
    ...     print(ccube.coord(c).long_name)
    ...     print(ccube.coord(c).points[::50])
    ...
    day_of_month
    [ 1 21 11  1 21 11  1 21]
    day_of_year
    [  1  51 101 151 201 251 301 351]
    month
    ['Jan' 'Feb' 'Apr' 'Jun' 'Jul' 'Sep' 'Nov' 'Dec']
    month_number
    [ 1  2  4  6  7  9 11 12]
    season
    ['djf' 'djf' 'mam' 'jja' 'jja' 'son' 'son' 'djf']
    season_number
    [0 0 1 2 2 3 3 0]
    year
    [2000 2000 2000 2000 2000 2000 2000 2000]

    """
    # most errors pop up when you try to add a coord that has
    # previously been added, or the cube doesn't contain the
    # necessary attribute.
    ccube = cube.copy()

    # Categorisers applied in order: numeric first, then string-valued.
    # Each failure (already-added coord, or missing attribute) is reported
    # and skipped rather than aborting the whole categorisation.
    categorisers = (
        # numeric
        iccat.add_day_of_year,
        iccat.add_day_of_month,
        iccat.add_month_number,
        iccat.add_season_number,
        iccat.add_year,
        # strings
        iccat.add_month,
        iccat.add_season,
    )
    for categoriser in categorisers:
        try:
            categoriser(ccube, "time")
        except (AttributeError, ValueError) as err:
            print(("add_time_coord_cats: {}, skipping . . . ".format(err)))

    return ccube