Example #1
def diag_foehn_Froude(meas_lat, meas_lon, prof_var):
    ''' Diagnose whether foehn conditions are present in data using the Froude number method described by Bannister (2015).

	Assumptions:

		- One Rossby wave of deformation from the mountain crest = ~150 km (longitude gridbox 42 in the domain used).

		- Assume representative mountain height of 2000 m.

	Criteria for foehn:

		- u at Z1 must exceed 2.0 m s-1

		- wind direction must be cross-peninsula

		- Foehn wind detected if Froude number exceeds 0.9 for 6+ hours

	Inputs:

		- meas_lat, meas_lon: latitude and longitude of the location at which you would like to diagnose foehn, typically the location of an AWS.

		- prof_var: dictionary of profile variables for the year of interest, retrieved using the function load_vars.

	Returns:

	    - foehn_freq: a 1D binary (0/1) time series indicating whether foehn conditions are diagnosed at each timestep at the location requested.

	    '''
    # Find gridbox of latitudes and longitudes of AWS/location at which foehn is occurring
    lon_idx, lat_idx = find_gridbox(meas_lat,
                                    meas_lon,
                                    real_lon=prof_var['lon'],
                                    real_lat=prof_var['lat'])
    # Find model level closest to 2000 m
    Z1 = np.argmin((prof_var['altitude'][:, lat_idx, 42].points - 2000)**2)
    # Find representative u wind upstream of mountains by at least one Rossby wave of deformation and above the mountain crest
    u_Z1 = np.mean(
        prof_var['u'][:, 7:Z1, lat_idx, 42].data, axis=1
    )  # take mean of flow between ~200 m - Z1 (levels 7-Z1) as per Elvidge et al. (2015)
    v_Z1 = np.mean(prof_var['v'][:, 7:Z1, lat_idx, 42].data, axis=1)
    # Calculate wind direction at this height
    WD = metpy.calc.wind_direction(u=u_Z1, v=v_Z1)
    # Calculate Froude number of upstream flow
    Fr, h_hat = Froude_number(u_Z1)
    foehn_freq = np.zeros(prof_var['u'][:, 0, lat_idx, lon_idx].data.shape)
    for timestep in range(len(foehn_freq) - 2):
        # Diagnose foehn when the upstream wind direction is cross-peninsula
        # (240-300 deg) and the Froude number is >= 0.9 at three consecutive
        # timesteps, i.e. for 6+ hours
        WD_ok = all(240. <= WD.magnitude[timestep + t] <= 300. for t in range(3))
        Fr_ok = all(Fr[timestep + t] >= 0.9 for t in range(3))
        if WD_ok and Fr_ok:
            foehn_freq[timestep] = 1.
    return foehn_freq
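

# A minimal usage sketch, not from the source: load_vars is referenced in the
# docstring above but not defined in these examples, and the AWS18 coordinates
# are taken from the calls further down.
prof_var = load_vars('2016')  # hypothetical signature: year as a string
foehn_srs = diag_foehn_Froude(-66.48272, -63.37105, prof_var)
print('foehn diagnosed at %d timesteps' % int(np.sum(foehn_srs)))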
Example #2
def diag_foehn_surf(meas_lat, meas_lon, surf_var):
    ''' Diagnose whether foehn conditions are present in data using the surface meteorology method described by Turton et al. (2018).

	Criteria for foehn:

		- surface temperature increase of >= 2 K for 6+ hours

		- wind direction must be cross-peninsula for 6+ hours

		- wind speed must increase for 6+ hours

		- relative humidity must decrease for 6+ hours

	Inputs:

		- meas_lat, meas_lon: latitude and longitude of the location at which you would like to diagnose foehn, typically the location of an AWS.

		- surf_var: dictionary of surface variables for the year of interest, retrieved using the function load_vars.

	Returns:

	    - foehn_freq: an integer counting the number of timesteps at which foehn conditions are diagnosed at the location requested.

	    - foehn_df: a pandas DataFrame containing information about each timestep diagnosed as foehn conditions.

	    '''
    # Find gridbox of latitudes and longitudes of AWS/location at which foehn is occurring
    lon_idx, lat_idx = find_gridbox(meas_lat,
                                    meas_lon,
                                    real_lon=surf_var['lon'],
                                    real_lat=surf_var['lat'])
    model_df = pd.DataFrame()
    model_df['FF_10m'] = pd.Series(surf_var['FF_10m'][:, lat_idx,
                                                      lon_idx].data)
    model_df['WD'] = pd.Series(surf_var['WD'][:, lat_idx, lon_idx].data)
    model_df['RH'] = pd.Series(surf_var['RH'][:, lat_idx, lon_idx].data)
    model_df['Tair'] = pd.Series(surf_var['Tair'][:, lat_idx, lon_idx].data)
    model_df['wind_difs'] = model_df['FF_10m'].diff(periods=2)
    model_df['RH_difs'] = model_df['RH'].diff(periods=2)
    model_df['T_difs'] = model_df['Tair'].diff(periods=2)
    foehn_df = model_df.loc[(
        (model_df.RH_difs <= rh_thresh[station_dict[station]]) &
        (model_df.T_difs > 0.)) |
                            ((model_df.RH <= model_df.RH.quantile(q=0.1)) &
                             (model_df.T_difs > 0.)) |
                            ((model_df.RH <= model_df.RH.quantile(q=0.15)) &
                             (model_df.T_difs > 3.))]
    foehn_freq = len(foehn_df)
    return foehn_freq, foehn_df
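

# The diff(periods=2) calls above measure change over two timesteps, i.e. a
# 6-hour change for 3-hourly data. A small self-contained illustration:
import pandas as pd

tair = pd.Series([265.0, 266.5, 268.2, 267.9])  # 3-hourly temperatures
print(tair.diff(periods=2).round(1).tolist())   # [nan, nan, 3.2, 1.4]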
Example #3
        'LWnet': LWnet[:, 0, :, :],
        'SWnet': SWnet[:, 0, :, :],
        'LWdown': LWdown[:, 0, :, :],
        'SWdown': SWdown[:, 0, :, :],
        'HL': HL[:, 0, :, :],
        'HS': HS[:, 0, :, :],
        'Etot': Etot[:, 0, :, :],
        'melt': melt_flux[:, 0, :, :],
        'Time_srs': t_num
    }
    return seas_SEB


seas_var = load_var('DJF')

lon_index14, lat_index14 = find_gridbox(-67.01, -61.03, seas_var['lat'],
                                        seas_var['lon'])
lon_index15, lat_index15 = find_gridbox(-67.34, -62.09, seas_var['lat'],
                                        seas_var['lon'])
lon_index17, lat_index17 = find_gridbox(-65.93, -61.85, seas_var['lat'],
                                        seas_var['lon'])
lon_index18, lat_index18 = find_gridbox(-66.48272, -63.37105, seas_var['lat'],
                                        seas_var['lon'])

lat_dict = {
    'AWS14': lat_index14,
    'AWS15': lat_index15,
    'AWS17': lat_index17,
    'AWS18': lat_index18
}

lon_dict = {
    'AWS14': lon_index14,
    'AWS15': lon_index15,
    'AWS17': lon_index17,
    'AWS18': lon_index18
}
Example #4
def combo_foehn(meas_lat, meas_lon, prof_var, surf_var):
    ''' Diagnose whether foehn conditions are present in data using the isentrope method described by Bannister (2015) and King et al. (2017).

	Assumptions:

		- One Rossby wave of deformation from the mountain crest = ~150 km (longitude gridbox 42 in the domain used).

		- Assume representative mountain height of 2000 m.

	Criteria for foehn:

		- u at Z1 must exceed 2.0 m s-1 (i.e. wind must have a cross-peninsula component).

		- Difference between height Z1 and the height of the Z1 isentrope in the transect defined in lee of the barrier (Z2), i.e. Z3 = Z1-Z2, must exceed 500 m over 6+ hours.

		- Surface warming (dT > 0.) must be observed.

		- Surface drying (dRH < 0.) must be observed.

	Inputs:

		- meas_lat, meas_lon: latitude and longitude of the location at which you would like to diagnose foehn, typically the location of an AWS.

		- prof_var: dictionary of profile variables for the year of interest, retrieved using the function load_vars.

	Returns:

	    - foehn_freq: an integer counting the number of timesteps at which foehn occurs at the location requested.

	    - foehn_df: a pandas DataFrame containing information about each timestep diagnosed as being foehn conditions.

	    '''
    # Find gridbox of latitudes and longitudes of AWS/location at which foehn is occurring
    lon_idx, lat_idx = find_gridbox(y=meas_lat,
                                    x=meas_lon,
                                    real_lat=prof_var['lat'],
                                    real_lon=prof_var['lon'])
    # Find model level closest to 2000 m
    Z1 = np.argmin((prof_var['altitude'][:, lat_idx, 42].points - 2000)**2)
    # Find representative u wind upstream of mountains by at least one Rossby wave of deformation and above the mountain crest
    u_Z1 = np.mean(prof_var['u'][:, 7:Z1, lat_idx, 42].data, axis=1)
    # Calculate elevation of theta isentrope upstream
    isen = np.copy(prof_var['theta'][:, Z1, lat_idx, 42].data)
    # Define 40 km transect from peak of orography across ice shelf
    # At each latitude, find the location of the maximum height of orography
    max_alt = np.argmax(prof_var['orog'].data, axis=1)
    # Define a 40 km transect on the Eastern side, i.e. over Larsen, from the peak of orography at that latitude over which to measure Z3
    transect_lons = np.asarray((max_alt, max_alt + 22))
    theta_transect = np.copy(
        prof_var['theta'][:, :, lat_idx,
                          transect_lons[0, lat_idx]:transect_lons[1, lat_idx]].data)
    foehn_freq = np.zeros(prof_var['u'][:, 0, lat_idx, lon_idx].data.shape)
    Z2 = np.zeros(prof_var['u'][:, 0, lat_idx, lon_idx].data.shape)
    Z3 = np.zeros(prof_var['u'][:, 0, lat_idx, lon_idx].data.shape)
    for timestep in range(len(foehn_freq) - 2):
        # Require u >= 2.0 m s-1 at three consecutive timesteps (i.e. 6+ hours)
        if all(u_Z1[timestep + t] > 2.0 for t in range(3)):
            # Find the minimum height of the upstream isentrope theta_Z1 in the transect defined, Z2
            try:
                hts, lons = np.where(theta_transect[timestep] == isen[timestep])
                min_ht = np.min(hts)  # raises ValueError if the isentrope is not found
                Z2[timestep] = prof_var['altitude'].points[min_ht, lat_idx, lon_idx]
            except ValueError:
                Z2[timestep] = np.nan
        else:
            Z2[timestep] = np.nan
        # Find difference between Z1 and Z2
        Z3[timestep] = prof_var['altitude'].points[Z1, lat_idx, 42] - Z2[timestep]
    model_df = pd.DataFrame()
    model_df['FF_10m'] = pd.Series(surf_var['FF_10m'][:, lat_idx,
                                                      lon_idx].data)
    model_df['WD'] = pd.Series(surf_var['WD'][:, lat_idx, lon_idx].data)
    model_df['RH'] = pd.Series(surf_var['RH'][:, lat_idx, lon_idx].data)
    model_df['Tair'] = pd.Series(surf_var['Tair'][:, lat_idx, lon_idx].data)
    model_df['Z3'] = pd.Series(np.repeat(
        Z3, 2))  # upsample to be compatible with 3-hourly data
    model_df['wind_difs'] = model_df['FF_10m'].diff(periods=2)
    model_df['RH_difs'] = model_df['RH'].diff(periods=2)
    model_df['T_difs'] = model_df['Tair'].diff(periods=2)
    # If Z3 > 1000 m for 6 hours or more (two instantaneous timesteps for 6-hourly data = at least 6 hours) thresholds: FF = 1.0, T = 2.0, RH = -5
    #foehn_df = model_df.loc[((model_df.RH_difs <= rh_thresh[station]) & (model_df.T_difs > 0.) & (model_df.Z3 >= 470.)) | ((model_df.RH <= model_df.RH.quantile(q=0.1)) & (model_df.T_difs > 0.) & (model_df.Z3 >= 470.)) | ((model_df.RH <= model_df.RH.quantile(q=0.15)) & (model_df.T_difs > 3.) & (model_df.Z3 >= 470.))]
    foehn_df = model_df.loc[((model_df.RH_difs < 0) & (model_df.T_difs > 0.) &
                             (model_df.Z3 >= 470.))]
    foehn_freq = len(foehn_df)
    return foehn_freq, foehn_df
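

# A minimal usage sketch, not from the source: prof_var and surf_var are the
# dictionaries described in the docstring (retrieved via load_vars), and the
# AWS18 coordinates are taken from the calls further down.
foehn_freq, foehn_df = combo_foehn(-66.48272, -63.37105, prof_var, surf_var)
print('%d timesteps diagnosed as foehn' % foehn_freq)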
Example #5
def load_surf(which):  # 'which' can be either 'old' or 'new'
    '''Load time series of MetUM model output.

    Inputs:

    'which': either 'old' for time series using original MetUM model orography and coastlines, or 'new' for updated MetUM
             model orography and coastlines.

    Outputs:

    surf_var: Dictionary of surface meteorological variables.

    '''
    surf = []
    if which == 'old':
        os.chdir(f_old)
        for file in os.listdir(f_old):
            if fnmatch.fnmatch(file, '*km1p5_smoothed_pa000.pp'):
                surf.append(file)
    elif which == 'new':
        os.chdir(f_new)
        for file in os.listdir(f_new):
            if fnmatch.fnmatch(file, '*km1p5_ctrl_pa012.pp'):
                surf.append(file)
    print('\n importing cubes...')
    T_surf = iris.load_cube(surf, 'surface_temperature')
    T_air = iris.load_cube(surf, 'air_temperature')
    orog = iris.load_cube(
        '/data/clivarm/wip/ellgil82/May_2016/Re-runs/km1p5_orog.pp',
        'surface_altitude'
    )  # This assumes your orography and land-sea mask are stored separately,
    lsm = iris.load_cube(
        '/data/clivarm/wip/ellgil82/May_2016/Re-runs/km1p5_lsm.pp',
        'land_binary_mask'
    )  # but can be adapted to read in from one of your file streams.
    T_surf.convert_units('celsius')
    T_air.convert_units('celsius')
    RH = iris.load_cube(surf, 'relative_humidity')
    ## Iris v1.11 version
    u_wind = iris.load_cube(surf, 'x_wind')
    v_wind = iris.load_cube(surf, 'y_wind')
    if which == 'old':
        v_wind = v_wind[:, :, 1:, :]
    elif which == 'new':
        v_wind = v_wind[:, 1:, :]
    Var = [T_surf, T_air, RH, u_wind, v_wind]
    ## Rotate projection
    print('\n rotating pole...')
    for var in Var:
        if which == 'old':
            real_lon, real_lat = rotate_data(var, 2, 3)
        elif which == 'new':
            real_lon, real_lat = rotate_data(var, 1, 2)
    ## Find the nearest grid box to the latitude of interest
    print('\n finding AWS...')
    lon_index, lat_index = find_gridbox(-66.48272, -63.37105, real_lat,
                                        real_lon)
    print('\n converting time units...')
    #convert units within iris
    Time = T_surf.coord('time')
    Time_srs = Time.units.num2date(Time.points)
    print('\n calculating wind speed...')
    ##convert u and v wind to wind speed
    #convert to numpy array
    v_CI = (v_wind.data)
    u_CI = (u_wind.data)
    sp_srs = np.sqrt((u_CI**2) + (v_CI**2))
    # Create Larsen mask (NB: check whether these subsetting indices are for the 4 km or 1.5 km grid)
    a = np.ones((orog.shape))
    orog = orog[:270, 95:240].data
    lsm = lsm[:270, 95:240].data
    b = np.zeros((270, 95))
    c = np.zeros((270, 160))
    d = np.zeros((130, 400))
    orog = np.hstack((b, orog))
    orog = np.hstack((orog, c))
    orog = np.vstack((orog, d))
    lsm = np.hstack((b, lsm))
    lsm = np.hstack((lsm, c))
    lsm = np.vstack((lsm, d))
    mask2d = np.ma.masked_where(orog > 15, a)
    Larsen_mask = np.ma.masked_where(lsm == 0, mask2d)
    Larsen_mask = np.broadcast_to(Larsen_mask == 1, T_surf.shape, subok=True)
    T_surf = np.ma.masked_array(T_surf.data, Larsen_mask.mask)
    RH = np.ma.masked_array(RH.data, Larsen_mask.mask)
    T_air = np.ma.masked_array(T_air.data, Larsen_mask.mask)
    sp_srs = np.ma.masked_array(sp_srs, Larsen_mask.mask)
    print('\n extracting time series from cubes...')
    # just one grid box
    if which == 'new':
        T_surf = T_surf[:, lat_index, lon_index].data
        T_air = T_air[:, lat_index, lon_index].data
        RH = RH[:, lat_index, lon_index].data
        sp_srs = sp_srs[:, lat_index, lon_index]
    elif which == 'old':
        T_surf = T_surf[:, :, lat_index, lon_index].data
        T_air = T_air[:, :, lat_index, lon_index].data
        RH = RH[:, :, lat_index, lon_index].data
        sp_srs = sp_srs[:, :, lat_index, lon_index]
        T_surf = construct_srs(T_surf)
        T_air = construct_srs(T_air)
        RH = construct_srs(RH)
        sp_srs = construct_srs(sp_srs)
        Time_srs = construct_Timesrs(Time_srs)
    RH[RH > 100] = 100
    print('\n constructing series...')
    var_dict = {
        'sp_srs': sp_srs,
        'Ts': T_surf,
        'T_air': T_air,
        'RH': RH,
        'Time_srs': Time_srs
    }
    return var_dict
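

# construct_srs and construct_Timesrs are called above but not defined in these
# examples. A minimal sketch of what they presumably do, under the assumption
# that the 'old' output has shape (forecast run, timestep, ...) and the runs
# simply need concatenating into one continuous 1-D series:
import numpy as np

def construct_srs(var_2d):
    # Hypothetical helper: flatten a (run, timestep) array into a 1-D time series
    return np.ma.concatenate([var_2d[i] for i in range(var_2d.shape[0])])

def construct_Timesrs(time_2d):
    # Hypothetical helper: same flattening for the matching array of datetimes
    return np.concatenate([time_2d[i] for i in range(time_2d.shape[0])])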
Example #6
def sens_test(res):
    '''Extract time series of surface temperature, air temperature and wind speed at a
    fixed location for the model resolution given by 'res', for sensitivity testing.'''
    surf = []
    for file in os.listdir(filepath):
        if fnmatch.fnmatch(file, '*%(res)s_*_pa012.pp' % locals()):
            surf.append(file)
    print('\n importing cubes...')
    os.chdir(filepath)
    T_surf = iris.load_cube(surf, 'surface_temperature')
    T_air = iris.load_cube(surf, 'air_temperature')
    orog = iris.load_cube(filepath + res + '_orog.pp', 'surface_altitude')
    lsm = iris.load_cube(filepath + res + '_lsm.pp', 'land_binary_mask')
    T_surf.convert_units('celsius')
    T_air.convert_units('celsius')
    RH = iris.load_cube(surf, 'relative_humidity')
    ## Iris v1.11 version
    u_wind = iris.load_cube(surf, 'x_wind')
    v_wind = iris.load_cube(surf, 'y_wind')
    v_wind = v_wind[:, 1:, :]
    Var = [T_surf, T_air, RH, u_wind, v_wind]
    ## Rotate projection
    print('\n rotating pole...')
    #create numpy arrays of coordinates
    rotated_lat = RH.coord('grid_latitude').points
    rotated_lon = RH.coord('grid_longitude').points
    ## set up parameters for rotated projection
    pole_lon = 298.5
    pole_lat = 22.99
    #rotate projection
    real_lon, real_lat = iris.analysis.cartography.unrotate_pole(
        rotated_lon, rotated_lat, pole_lon, pole_lat)
    print('\nunrotating pole...')
    lat = RH.coord('grid_latitude')
    lon = RH.coord('grid_longitude')
    lat = iris.coords.DimCoord(real_lat,
                               standard_name='latitude',
                               long_name="grid_latitude",
                               var_name="lat",
                               units=lat.units)
    lon = iris.coords.DimCoord(real_lon,
                               standard_name='longitude',
                               long_name="grid_longitude",
                               var_name="lon",
                               units=lon.units)
    for var in Var:
        var.remove_coord('grid_latitude')
        var.add_dim_coord(lat, data_dim=1)
        var.remove_coord('grid_longitude')
        var.add_dim_coord(lon, data_dim=2)
    ## Find the nearest grid box to the latitude of interest
    print('\n finding AWS...')
    lon_index, lat_index = find_gridbox(-66.48272, -63.37105, real_lat,
                                        real_lon)
    print('\n converting time units...')
    #convert units within iris
    Time = T_surf.coord('time')
    Time_srs = Time.units.num2date(Time.points)
    #convert to numpy array
    v_CI = (v_wind.data)
    u_CI = (u_wind.data)
    sp_srs = np.sqrt((u_CI**2) + (v_CI**2))
    # NB: a fixed longitude index (140) is used here rather than the lon_index found above
    Ts_subset = T_surf[:, lat_index, 140]
    Ta_subset = T_air[:, lat_index, 140]
    wind_subset = sp_srs[:, lat_index, 140]
    return Ts_subset, Ta_subset, wind_subset
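

# A minimal usage sketch, not from the source: only 'km1p5' appears in the file
# patterns in these examples, so the second resolution string is an assumption.
for res in ['km1p5', 'km4p0']:
    Ts_subset, Ta_subset, wind_subset = sens_test(res)
    print(res, float(Ts_subset.data.mean()), float(wind_subset.mean()))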
Example #7
def load_surf(res):
    '''Function to load in surface meteorological data from the MetUM. Make sure the file names point to the correct file stream where your variables are stored.
        This can be adapted to use other formats, e.g. NetCDF, GRIB etc. (see Iris docs for further information: https://scitools.org.uk/iris/docs/latest/#).'''
    surf = []
    for file in os.listdir(filepath):
        if fnmatch.fnmatch(file, '*%(res)s_*_pa012.pp' % locals()):
            surf.append(file)
    print('\n importing cubes...')
    os.chdir(filepath)
    T_surf = iris.load_cube(surf, 'surface_temperature')
    T_air = iris.load_cube(surf, 'air_temperature')
    orog = iris.load_cube(
        filepath + res + '_orog.pp', 'surface_altitude'
    )  # This assumes your orography and land-sea mask are stored separately,
    lsm = iris.load_cube(
        filepath + res + '_lsm.pp', 'land_binary_mask'
    )  # but can be adapted to read in from one of your file streams.
    T_surf.convert_units('celsius')
    T_air.convert_units('celsius')
    RH = iris.load_cube(surf, 'relative_humidity')
    ## Iris v1.11 version
    u_wind = iris.load_cube(surf, 'x_wind')
    v_wind = iris.load_cube(surf, 'y_wind')
    v_wind = v_wind[:, 1:, :]
    Var = [T_surf, T_air, RH, u_wind, v_wind]
    ## Rotate projection
    print('\n rotating pole...')
    for var in Var:
        real_lon, real_lat = rotate_data(var, 1, 2)
    ## Find the nearest grid box to the latitude of interest
    print('\n finding AWS...')
    lon_index, lat_index = find_gridbox(-66.48272, -63.37105, real_lat,
                                        real_lon)
    print('\n converting time units...')
    #convert units within iris
    Time = T_surf.coord('time')
    Time_srs = Time.units.num2date(Time.points)
    print('\n calculating wind speed...')
    ##convert u and v wind to wind speed
    #convert to numpy array
    v_CI = (v_wind.data)
    u_CI = (u_wind.data)
    sp_srs = np.sqrt((u_CI**2) + (v_CI**2))
    # Create Larsen mask (NB: check whether these subsetting indices are for the 4 km or 1.5 km grid)
    a = np.ones((orog.shape))
    orog = orog[:270, 95:240].data
    lsm = lsm[:270, 95:240].data
    b = np.zeros((270, 95))
    c = np.zeros((270, 160))
    d = np.zeros((130, 400))
    orog = np.hstack((b, orog))
    orog = np.hstack((orog, c))
    orog = np.vstack((orog, d))
    lsm = np.hstack((b, lsm))
    lsm = np.hstack((lsm, c))
    lsm = np.vstack((lsm, d))
    mask2d = np.ma.masked_where(orog > 15, a)
    Larsen_mask = np.ma.masked_where(lsm == 0, mask2d)
    Larsen_mask = np.broadcast_to(Larsen_mask == 1, T_surf.shape, subok=True)
    T_surf = np.ma.masked_array(T_surf.data, Larsen_mask.mask)
    RH = np.ma.masked_array(RH.data, Larsen_mask.mask)
    T_air = np.ma.masked_array(T_air.data, Larsen_mask.mask)
    sp_srs = np.ma.masked_array(sp_srs, Larsen_mask.mask)
    # Calculate 5th and 95th percentiles to give estimate of variability in time series
    print('\n calculating percentiles... (this may take some time)')
    percentiles = []
    for each_var in [T_surf, T_air, RH, sp_srs]:
        # NB: np.percentile does not honour the mask of a masked array, so masked
        # gridpoints contribute their underlying values to these percentiles
        p95 = np.percentile(each_var, 95, axis=(1, 2))
        p5 = np.percentile(each_var, 5, axis=(1, 2))
        percentiles.append(p5)
        percentiles.append(p95)
    print('\n extracting time series from cubes...')
    # just one grid box
    T_surf = T_surf[:, lat_index, lon_index].data
    T_air = T_air[:, lat_index, lon_index].data
    RH = RH[:, lat_index, lon_index].data
    RH[RH > 100] = 100
    sp_srs = sp_srs[:, lat_index, lon_index]
    print('\n constructing %(res)s series...' % locals())
    var_dict = {
        'sp_srs': sp_srs,
        'Ts': T_surf,
        'T_air': T_air,
        'RH': RH,
        'Time_srs': Time_srs,
        'percentiles': percentiles
    }
    return var_dict
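

# A minimal sketch of how the percentile envelope might be plotted (not from
# the source; matplotlib and the 'km1p5' resolution string are assumptions).
# The percentiles list is ordered p5, p95 per variable, with T_surf first.
import matplotlib.pyplot as plt

var = load_surf('km1p5')
fig, ax = plt.subplots()
ax.plot(var['Time_srs'], var['Ts'], color='k', label='Ts')
ax.fill_between(var['Time_srs'], var['percentiles'][0], var['percentiles'][1],
                color='0.8', label='5th-95th percentile')
ax.legend()
plt.show()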
Example #8
def load_SEB(res):
    '''Function to load in SEB data from the MetUM. Make sure the file names point to the correct file stream where your variables are stored.
    This can be adapted to use other formats, e.g. NetCDF, GRIB etc. (see Iris docs for further information: https://scitools.org.uk/iris/docs/latest/#).'''
    SEB = []
    surf = []
    for file in os.listdir(filepath):
        if fnmatch.fnmatch(file, '*%(res)s_*_pa012.pp' % locals()):
            surf.append(file)
        elif fnmatch.fnmatch(file, '*%(res)s_*_pb012.pp' % locals()):
            SEB.append(file)
    print('\n importing cubes at %(res)s resolution...' % locals())
    os.chdir(filepath)
    print('\n Downwelling shortwave...')
    SW_d = iris.load_cube(SEB, 'surface_downwelling_shortwave_flux_in_air')
    print('\n Downwelling longwave...')
    LW_d = iris.load_cube(SEB, 'surface_downwelling_longwave_flux')
    print('\n Net shortwave...')
    SW_n = iris.load_cube(SEB, 'surface_net_downward_shortwave_flux')
    print('\n Net longwave...')
    LW_n = iris.load_cube(SEB, 'surface_net_downward_longwave_flux')
    print('\n Latent heat...')
    LH = iris.load_cube(SEB, 'surface_upward_latent_heat_flux')
    print('\n Sensible heat...')
    SH = iris.load_cube(SEB, 'surface_upward_sensible_heat_flux')
    print('\n Surface temperature...')
    T_surf = iris.load_cube(surf, 'surface_temperature')
    T_surf.convert_units('celsius')
    Var = [SH, LH, LW_d, SW_d, LW_n, SW_n, T_surf]
    ## Rotate projection
    print('\n rotating pole...')
    for var in Var:
        real_lon, real_lat = rotate_data(var, 1, 2)
    ## Find the nearest grid box to the latitude of interest
    print('\n finding AWS...')
    lon_index, lat_index = find_gridbox(-66.48272, -63.37105, real_lat,
                                        real_lon)
    print('\n converting time units...')
    #convert units within iris
    Time = SH.coord('time')
    Time_srs = Time.units.num2date(Time.points)
    # Create Larsen mask to return values only where a) the gridbox is on the ice shelf and b) orography is < 50 m
    orog = iris.load_cube(filepath + res + '_orog.pp', 'surface_altitude')
    lsm = iris.load_cube(filepath + res + '_lsm.pp', 'land_binary_mask')
    a = np.ones((orog.shape))
    orog = orog[:270, 95:240].data
    lsm = lsm[:270, 95:240].data
    b = np.zeros((270, 95))
    c = np.zeros((270, 160))
    d = np.zeros((130, 400))
    orog = np.hstack((b, orog))
    orog = np.hstack((orog, c))
    orog = np.vstack((orog, d))
    lsm = np.hstack((b, lsm))
    lsm = np.hstack((lsm, c))
    lsm = np.vstack((lsm, d))
    mask2d = np.ma.masked_where(orog > 50, a)
    Larsen_mask = np.ma.masked_where(lsm == 0, mask2d)
    Larsen_mask = np.broadcast_to(Larsen_mask == 1, T_surf.shape, subok=True)
    SH = np.ma.masked_array(SH.data, Larsen_mask.mask)
    LH = np.ma.masked_array(LH.data, Larsen_mask.mask)
    SW_d = np.ma.masked_array(SW_d.data, Larsen_mask.mask)
    LW_d = np.ma.masked_array(LW_d.data, Larsen_mask.mask)
    SW_n = np.ma.masked_array(SW_n.data, Larsen_mask.mask)
    LW_n = np.ma.masked_array(LW_n.data, Larsen_mask.mask)
    # Flip turbulent fluxes to match convention (positive = down)
    LH = 0 - LH
    SH = 0 - SH
    # Calculate 5th and 95th percentiles to give estimate of variability in time series
    print('\n calculating percentiles...')
    percentiles = []
    for each_var in [SH, LH, SW_d, SW_n, LW_d, LW_n]:
        p95 = np.percentile(each_var, 95, axis=(1, 2))
        p5 = np.percentile(each_var, 5, axis=(1, 2))
        percentiles.append(p5)
        percentiles.append(p95)
    print('\n extracting time series from cubes...')
    # Just one grid box
    SW_d = SW_d[:, lat_index, lon_index].data
    LW_d = LW_d[:, lat_index, lon_index].data
    SW_n = SW_n[:, lat_index, lon_index].data
    LW_n = LW_n[:, lat_index, lon_index].data
    LH = LH[:, lat_index, lon_index].data
    SH = SH[:, lat_index, lon_index].data
    T_surf = T_surf[:, lat_index, lon_index].data
    print('\n constructing %(res)s series...' % locals())
    print('\n making melt variable...')
    # Calculate total SEB (without Gs, which is unavailable in the UM)
    E = SW_n + LW_n + LH + SH
    # Create melt variable: mask the SEB total wherever Ts is below the melting
    # point (< -0.025 degC), then zero the masked entries to leave melt fluxes only
    melt = np.ma.masked_where(T_surf < -0.025, E)
    melt = melt.data - melt.data * (np.ma.getmask(melt))
    melt_forced = np.ma.masked_where(AWS_var['Tsobs'] < -0.025, E)
    melt_forced = melt_forced.data - melt_forced.data * (
        np.ma.getmask(melt_forced))
    melt_forced[melt_forced < 0] = 0
    var_dict = {
        'Time_srs': Time_srs,
        'Ts': T_surf,
        'SW_n': SW_n,
        'SW_d': SW_d,
        'LW_n': LW_n,
        'LW_d': LW_d,
        'SH': SH,
        'LH': LH,
        'melt': melt,
        'melt_forced': melt_forced,
        'percentiles': percentiles,
        'E': E
    }
    return var_dict
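

# The masked-array arithmetic above is a compact way of zeroing the energy flux
# at timesteps with no melt. A small self-contained illustration of the idiom:
import numpy as np

E = np.array([10., -5., 20., 15.])
T_surf = np.array([-2.0, 0.0, -0.1, 0.1])
melt = np.ma.masked_where(T_surf < -0.025, E)
# subtracting data * mask zeroes the masked entries and drops the mask
melt = melt.data - melt.data * np.ma.getmask(melt)
print(melt)  # [ 0. -5.  0. 15.]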
def load_time_srs():
    # NB: the loading of melt, Ts, Tair, RH, FF_10m, FF_min and the min/max
    # fields is truncated in this example
    FF_max = iris.load_cube('1998-2017_FF_10m_monmax.nc', 'wind_speed')
    FF_10m = FF_10m[:, :, :220, :220]
    FF_min = FF_min[:, :, :220, :220]
    FF_max = FF_max[:, :, :220, :220]
    # Rotate data onto standard lat/lon grid
    for i in [melt, Ts, Tair, RH, FF_10m]:
        real_lon, real_lat = rotate_data(i, np.ndim(i) - 2, np.ndim(i) - 1)
    var_dict = {'melt': melt[:, 0, :, :], 'Ts': Ts[:, 0, :, :],
                'Ts_max': Ts_max[:, 0, :, :], 'Ts_min': Ts_min[:, 0, :, :],
                'FF_10m': FF_10m[:, 0, :, :], 'FF_10m_min': FF_min[:, 0, :, :],
                'FF_10m_max': FF_max[:, 0, :, :], 'RH': RH[:, 0, :, :],
                'RH_min': RH_min[:, 0, :, :], 'RH_max': RH_max[:, 0, :, :],
                'Time_srs': Ts.coord('time').points, 'Tair': Tair[:, 0, :, :],
                'Tair_max': Tair_max[:, 0, :, :], 'Tair_min': Tair_min[:, 0, :, :],
                'lat': real_lat, 'lon': real_lon}
    return var_dict


all_vars = load_time_srs()

lon_index14, lat_index14 = find_gridbox(-67.01, -61.03, all_vars['lat'], all_vars['lon'])
lon_index15, lat_index15 = find_gridbox(-67.34, -62.09, all_vars['lat'], all_vars['lon'])
lon_index17, lat_index17 = find_gridbox(-65.93, -61.85, all_vars['lat'], all_vars['lon'])
lon_index18, lat_index18 = find_gridbox(-66.48272, -63.37105, all_vars['lat'], all_vars['lon'])

lat_dict = {'AWS14': lat_index14,
            'AWS15': lat_index15,
            'AWS17': lat_index17,
            'AWS18': lat_index18}

lon_dict = {'AWS14': lon_index14,
            'AWS15': lon_index15,
            'AWS17': lon_index17,
            'AWS18': lon_index18}
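
# A minimal sketch of how the station dictionaries might be used, assuming
# all_vars has been loaded with load_time_srs() as above
melt_srs = {stn: all_vars['melt'][:, lat_dict[stn], lon_dict[stn]].data
            for stn in ['AWS14', 'AWS15', 'AWS17', 'AWS18']}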