Example #1
0
def find_nearest_latlon_xarray(arr,
                               lat=37.102400,
                               lon=-76.392900,
                               radius=12e3):
    """Select the grid cell of *arr* nearest to a given lat/lon point.

    Parameters
    ----------
    arr : xarray.DataArray
        Gridded data exposing 2-D ``longitude``/``latitude`` coordinates
        and ``x``/``y`` dimensions.
    lat : float
        Target latitude in degrees (default 37.102400).
    lon : float
        Target longitude in degrees (default -76.392900).
    radius : float
        Nearest-neighbour search radius in metres passed to pyresample
        (default 12e3).

    Returns
    -------
    xarray.DataArray
        The squeezed selection of *arr* at the nearest grid point.
    """
    from pyresample import utils, geometry
    # NOTE: the unused `array` import was removed; only vstack is needed.
    from numpy import vstack
    # Source grid from the data's own coordinates.
    grid1 = geometry.GridDefinition(lons=arr.longitude, lats=arr.latitude)
    # Target "grid" is the single requested point (pyresample needs 2-D input).
    grid2 = geometry.GridDefinition(lons=vstack([lon]), lats=vstack([lat]))
    row, col = utils.generate_nearest_neighbour_linesample_arrays(
        grid1, grid2, radius)
    row = row.flatten()
    col = col.flatten()
    return arr.sel(x=col).sel(y=row).squeeze()
Example #2
0
def resample(layer, tl, br, samples=256):
    """Resample *layer* onto a regular lat/lon grid spanning a bounding box.

    Parameters
    ----------
    layer : object
        Source data with ``lats``, ``lons`` and ``values`` attributes.
    tl : sequence of float
        (lon, lat) of the top-left corner of the target box.
    br : sequence of float
        (lon, lat) of the bottom-right corner of the target box.
    samples : int
        Number of grid points per axis (default 256).

    Returns
    -------
    numpy.ndarray
        Resampled grid, flipped vertically, with zeros replaced by NaN.
    """

    data_grid = geometry.GridDefinition(lats=layer.lats, lons=layer.lons)

    # Form the coordinates for resampling.
    # Bug fix: `samples` was previously ignored and 256 was hard-coded.
    rlons = np.linspace(tl[0], br[0], samples)
    rlats = np.linspace(tl[1], br[1], samples)

    resample_grid = geometry.GridDefinition(
        lats=np.tile(rlats, (rlons.size, 1)).T,
        lons=np.tile(rlons, (rlats.size, 1))
        )

    # Build a nearest-neighbour resampler.
    resampler = image.ImageContainerNearest(
        layer.values,
        data_grid,
        radius_of_influence=6500,
        reduce_data=True
        )

    # Form the appropriate grid; zeros are treated as missing data.
    grid = np.flipud(resampler.resample(resample_grid).image_data)
    grid[grid == 0] = np.nan

    return grid
def create_custom_grid(ds, filename, var_name, dl, custom_grid):
    """Resample *var_name* and ``error_std`` from *ds* onto the grid stored
    in the netCDF file *custom_grid*, then write the result to *filename*.

    The first time slice of each field is resampled with nearest-neighbour
    interpolation; *dl* supplies the output time coordinate.
    """
    target = xr.open_dataset(custom_grid)
    tlon = target['lon']
    tlat = target['lat']
    target_def = geometry.GridDefinition(lons=tlon, lats=tlat)
    source_def = geometry.GridDefinition(lons=ds['lon'], lats=ds['lat'])

    def _to_custom(field):
        # Nearest-neighbour resampling of the first time slice onto the
        # custom grid (very large radius, as in the original code).
        container = image.ImageContainerNearest(field.data[0, :, :],
                                                source_def,
                                                radius_of_influence=20000000)
        return container.resample(target_def).image_data

    data_out = _to_custom(ds[var_name])
    err_out = _to_custom(ds['error_std'])

    out = xr.Dataset(
        {
            var_name: (('time', 'x', 'y'), np.expand_dims(data_out, axis=0)),
            "error_std": (('time', 'x', 'y'), np.expand_dims(err_out, axis=0)),
            "lon": (('x', 'y'), tlon),
            "lat": (('x', 'y'), tlat)
        },
        coords={
            'time': dl[0:1],
        },
    )
    out.to_netcdf(filename)
Example #4
0
def interp_to_obs(var, df, lat, lon, radius=12000.):
    """Nearest-neighbour interpolate a model field to observation sites.

    Parameters
    ----------
    var : xarray.DataArray
        Model variable with ``x``/``y``/``time`` dimensions.
    df : pandas.DataFrame
        Observations; must contain 'Latitude', 'Longitude', 'SCS',
        'datetime', 'datetime_local', 'Obs' and 'utcoffset' columns.
    lat, lon : array-like
        2-D latitude/longitude arrays of the model (CMAQ) grid.
    radius : float
        Nearest-neighbour search radius in metres (default 12000.).

    Returns
    -------
    pandas.DataFrame
        Long-format frame pairing interpolated 'model' values with 'Obs'.
    """
    # Bug fix: numpy removed the `NaN` alias in NumPy 2.0; use `nan`.
    from numpy import nan, vstack
    from pyresample import geometry, image
    from pandas import to_timedelta, DataFrame
    # define CMAQ pyresample grid (source)
    grid1 = geometry.GridDefinition(lons=lon, lats=lat)
    # get unique sites from df
    dfn = df.drop_duplicates(subset=['Latitude', 'Longitude'])
    # define site grid (target); vstack makes the 1-D site lists 2-D
    lats = dfn.Latitude.values
    lons = dfn.Longitude.values
    grid2 = geometry.GridDefinition(lons=vstack(lons), lats=vstack(lats))
    # Create image container
    i = image.ImageContainerNearest(var.transpose('y', 'x', 'time').values,
                                    grid1,
                                    radius_of_influence=radius,
                                    fill_value=nan)
    # resample
    ii = i.resample(grid2).image_data.squeeze()
    # recombine data: sites x times, then stack into long format
    e = DataFrame(ii, index=dfn.SCS, columns=var.time.values)
    w = e.stack().reset_index().rename(columns={
        'level_1': 'datetime',
        0: 'model'
    })
    w = w.merge(dfn.drop(['datetime', 'datetime_local', 'Obs'], axis=1),
                on='SCS',
                how='left')
    w = w.merge(df[['datetime', 'SCS', 'Obs']],
                on=['SCS', 'datetime'],
                how='left')
    # calculate local datetime from the site UTC offset (hours).
    # 'h' is the non-deprecated spelling of the hour unit in pandas.
    w['datetime_local'] = w.datetime + to_timedelta(w.utcoffset, 'h')

    return w
Example #5
0
    def test_nearest_neighbor_grid_grid(self):
        """Linesample arrays can be generated between two lon/lat grids."""
        from pyresample import utils, geometry
        # Destination: a coarser 40x50 grid over a small subdomain.
        dst_lons = create_test_longitude(-95.0, -85.0, (40, 50), dtype=np.float64)
        dst_lats = create_test_latitude(25.0, 35.0, (40, 50), dtype=np.float64)
        grid_dst = geometry.GridDefinition(lons=dst_lons, lats=dst_lats)

        # Source: a denser 400x500 grid covering a wider area.
        src_lons = create_test_longitude(-100.0, -80.0, (400, 500), dtype=np.float64)
        src_lats = create_test_latitude(20.0, 40.0, (400, 500), dtype=np.float64)
        grid = geometry.GridDefinition(lons=src_lons, lats=src_lats)
        rows, cols = utils.generate_nearest_neighbour_linesample_arrays(grid, grid_dst, 12000.)
Example #6
0
    def construct_source_mask(self):
        """ Read the binary file and set the mask """

        # Settings for the binary file
        # 10800 x 5400 unsigned bytes after a fixed-size header
        # (a 1/30-degree global grid, judging by the dimensions).
        xdim, ydim = 10800, 5400
        mask_struct_fmt = "<%0.fB" % (xdim * ydim)
        n_bytes_header = 1392

        # Read the content of the landmask in a string
        with open(self.mask_filepath, "rb") as fh:
            # Skip header
            fh.seek(n_bytes_header)
            content = fh.read(xdim * ydim)

        # decode string & order to array
        mask_val = np.array(struct.unpack(mask_struct_fmt, content))
        # Reshape then transpose so the final array is (ydim, xdim),
        # matching the meshgrid produced below.
        mask = mask_val.reshape((xdim, ydim))
        mask = mask.transpose()

        # Convert to only land/sea flag
        # Note: mask must be a byte data type since netCDF does not handle
        #       variables of type bool very well
        mask = np.int8(mask > 0)

        # Compute longitude/latitude grids
        # NOTE(review): linspace includes both endpoints, so 0 and 360
        # (the same meridian) both appear -- confirm this is intended.
        lons_1d = np.linspace(0., 360., xdim)
        lats_1d = np.linspace(-90, 90, ydim)
        lons, lats = np.meshgrid(lons_1d, lats_1d)

        # Create geometry definitions
        area_def = geometry.GridDefinition(lons=lons, lats=lats)

        # Set the mask
        self.set_mask(mask, area_def)
Example #7
0
 def test_dtype(self):
     """Neighbour info lookup works with float32 swath coordinates."""
     lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
     lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
     grid_def = geometry.GridDefinition(lons, lats)
     # Down-cast the swath coordinates to single precision.
     lons = numpy.asarray(lons, dtype='f4')
     lats = numpy.asarray(lats, dtype='f4')
     swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
     (valid_input_index, valid_output_index,
      index_array, distance_array) = kd_tree.get_neighbour_info(
          swath_def, grid_def, 50000, neighbours=1, segments=1)
Example #8
0
def sample_latlon(layer, lat, lon):
    """
    Returns a float which is a value grid, which is resampled.
    """
    # Geometry of the source layer.
    source_def = geometry.GridDefinition(lats=layer.lats, lons=layer.lons)

    nn = image.ImageContainerNearest(
        layer.values,
        source_def,
        radius_of_influence=6500,
        reduce_data=False
        )

    # A 1x1 target grid holding only the requested point.
    point_def = geometry.GridDefinition(
        lats=np.ones((1, 1)) * lat,
        lons=np.ones((1, 1)) * lon)

    # Resample and pull out the single value.
    sampled = nn.resample(point_def).image_data
    return float(sampled[0][0])
Example #9
0
    def test_nearest_neighbor_area_grid(self):
        """Linesample arrays can be generated from an area to a grid."""
        from pyresample import utils, geometry
        # Target grid: 50x100 lon/lat cells over a small domain.
        grid_lons = create_test_longitude(-94.9, -90.0, (50, 100), dtype=np.float64)
        grid_lats = create_test_latitude(25.1, 30.0, (50, 100), dtype=np.float64)
        grid = geometry.GridDefinition(lons=grid_lons, lats=grid_lats)

        # Source area: a 400x500 Lambert conformal conic CONUS domain.
        proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lat_1=25 +lon_0=-95 +units=m +no_defs"
        proj_dict = utils.proj4.proj4_str_to_dict(proj_str)
        extents = [0, 0, 1000. * 5000, 1000. * 5000]
        area_def = geometry.AreaDefinition('CONUS', 'CONUS', 'CONUS',
                                           proj_dict, 400, 500, extents)
        rows, cols = utils.generate_nearest_neighbour_linesample_arrays(area_def, grid, 12000.)
Example #10
0
    def test_nearest_neighbor_grid_area(self):
        """Linesample arrays can be generated from a grid to an area."""
        from pyresample import utils, geometry
        proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lat_1=25 +lon_0=-95 +units=m +no_defs"
        # Bug fix: `pyresample` itself is not imported in this method, so
        # `pyresample.utils._proj4...` raised NameError.  Use the imported
        # `utils` name, matching the sibling tests in this suite.
        proj_dict = utils.proj4.proj4_str_to_dict(proj_str)
        extents = [0, 0, 1000. * 2500., 1000. * 2000.]
        area_def = geometry.AreaDefinition('CONUS', 'CONUS', 'CONUS',
                                           proj_dict, 40, 50, extents)

        # Source grid: 550x500 lon/lat cells covering the area.
        lon_arr = create_test_longitude(-100.0, -60.0, (550, 500), dtype=np.float64)
        lat_arr = create_test_latitude(20.0, 45.0, (550, 500), dtype=np.float64)
        grid = geometry.GridDefinition(lons=lon_arr, lats=lat_arr)
        rows, cols = utils.generate_nearest_neighbour_linesample_arrays(grid, area_def, 12000.)
Example #11
0
def grid_interpolation(src_grid,
                       tar_grid,
                       radius_of_influence=500000,
                       fill_value=None):
    """Nearest-neighbour regrid ``src_grid['data']`` onto ``tar_grid``.

    Both arguments are mappings with 'lat' and 'lon' arrays (the source
    additionally carries 'data').  Longitudes are normalised to [0, 360)
    before building the grid definitions.
    """
    src_lat = src_grid['lat']
    tar_lat = tar_grid['lat']

    # Normalise both longitude arrays to a common 0-360 convention.
    src_lon = src_grid['lon'] % 360.0
    tar_lon = tar_grid['lon'] % 360.0

    source_def = geometry.GridDefinition(lons=src_lon, lats=src_lat)
    target_def = geometry.GridDefinition(lons=tar_lon, lats=tar_lat)

    container = image.ImageContainerNearest(
        src_grid['data'],
        source_def,
        radius_of_influence=radius_of_influence,
        fill_value=fill_value)
    resampled = container.resample(target_def)

    return resampled.image_data
Example #12
0
def get_grid_def(lon, lat):
    """Wrap *lon*/*lat* arrays in a pyresample ``GridDefinition``.

    Parameters
    ----------
    lon : array-like
        Longitude array of the grid.
    lat : array-like
        Latitude array of the grid.

    Returns
    -------
    pyresample.geometry.GridDefinition
        Geometry definition built from the given coordinates.
    """
    return geometry.GridDefinition(lons=lon, lats=lat)
Example #13
0
def readE_P():
    """Read Nov-2014 evaporation/precipitation, de-accumulate the paired
    fields, and resample their sum onto the Arctic EASE grid.

    Returns
    -------
    numpy.ndarray
        E+P totals nearest-neighbour sampled onto the 251x251 EASE grid.
    """
    x_size = 251
    y_size = 251
    description = 'Arctic EASE grid'
    proj_id = 'ease_nh'
    from pyresample import geometry, utils, image
    area_id = 'ease_nh'
    area_extent = (-7326849.0625, -7326849.0625, 7326849.0625, 7326849.0625)
    proj_dict = {'a': '6371228.0', 'units': 'm', 'lon_0': '0',
                 'proj': 'laea', 'lat_0': '90'}
    area_def = geometry.AreaDefinition(area_id, description, proj_id,
                                       proj_dict, x_size, y_size, area_extent)
    e_p = np.zeros((20, 241, 480), float)
    nc = Dataset('evap_precip201411.nc', 'r')
    t = nc.variables['time'][:][20 * 4 + 2:30 * 4 + 2]
    t0 = nc.variables['time'][:][0:1]
    # Reverse the latitude axis so it runs south-to-north.
    e = nc.variables['e'][21 * 4 + 2:31 * 4 + 2, ::-1, :]
    p = nc.variables['tp'][21 * 4 + 2:31 * 4 + 2, ::-1, :]
    # Fields are accumulated in pairs; difference every second step to
    # de-accumulate.
    for i in range(1, e.shape[0], 2):
        e[i, :, :] = e[i, :, :] - e[i - 1, :, :]
        p[i, :, :] = p[i, :, :] - p[i - 1, :, :]

    # Bug fix: Python-2 print statements converted to print() calls so the
    # function is valid on Python 3 as well.
    print(datetime.datetime(1900, 1, 1) + datetime.timedelta(hours=int(t[0])))
    print(datetime.datetime(1900, 1, 1) + datetime.timedelta(hours=int(t0[0])))
    print(datetime.datetime(1900, 1, 1) + datetime.timedelta(hours=int(t[-1])))
    e_p = (e + p)
    # 0.75-degree global lon/lat grid; wrap longitudes to [-180, 180].
    lon, lat = np.meshgrid(0 + np.arange(480) * 0.75,
                           -90 + np.arange(241) * 0.75)
    lon[lon > 180] -= 360.
    grid_def = geometry.GridDefinition(lons=lon, lats=lat)
    row_indices, col_indices = \
        utils.generate_nearest_neighbour_linesample_arrays(grid_def, area_def,
                                                           200000)

    msg_con = image.ImageContainer(e_p.sum(axis=0), grid_def)
    e_p_grid = msg_con.get_array_from_linesample(row_indices, col_indices)
    return e_p_grid
Example #14
0
# Script: resample an accumulated E+P field onto the Arctic EASE grid and
# start a north-polar stereographic plot.
x_size = 251
y_size = 251
description = 'Arctic EASE grid'
proj_id = 'ease_nh'
from pyresample import geometry, utils, image
area_id = 'ease_nh'
area_extent = (-7326849.0625, -7326849.0625, 7326849.0625, 7326849.0625)
# Lambert azimuthal equal-area on a sphere, centred on the north pole.
proj_dict = {'a': '6371228.0', 'units': 'm', 'lon_0': '0', \
             'proj': 'laea', 'lat_0': '90'}
area_def = geometry.AreaDefinition(area_id, description, proj_id, \
                                   proj_dict, x_size, y_size, area_extent)
import numpy as np

# 0.75-degree global lon/lat grid, longitudes at cell midpoints.
lon, lat = np.meshgrid(-180 + 0.75 / 2 + np.arange(480) * 0.75,
                       -90 + np.arange(241) * 0.75)
grid_def = geometry.GridDefinition(lons=lon, lats=lat)
row_indices, \
    col_indices = \
                  utils.generate_nearest_neighbour_linesample_arrays(grid_def, area_def, 200000)

# NOTE(review): `e_p` is not defined in this snippet -- presumably the
# time-accumulated evaporation+precipitation array from companion code.
msg_con = image.ImageContainer(e_p.sum(axis=0), grid_def)
e_p_grid = msg_con.get_array_from_linesample(row_indices, col_indices)
import matplotlib.pyplot as plt
import matplotlib
matplotlib.rcParams.update({'font.size': 13})

# NOTE(review): `Basemap` is not imported here -- assumed imported elsewhere.
m = Basemap(projection='npstere', boundinglat=20, lon_0=0, resolution='l')
plt.suptitle('Moisture Transport from Lagrangian Analysis', fontsize=14)
plt.subplot(111)

m.drawcoastlines()
Example #15
0
def resample_data(lats, lons, data):
    """Resample *data* from its lat/lon grid onto a polar-stereographic
    LAPS Scandinavian domain, printing the quick and nearest-neighbour
    results.

    Parameters
    ----------
    lats, lons : array-like
        2-D coordinate arrays describing the source grid of *data*.
    data : array-like
        Field to resample.
    """

    # Grid definition information of existing data
    grid_def = geometry.GridDefinition(lons=lons, lats=lats)

    # Wanted projection
    area_id = 'laps_scan'
    description = 'LAPS Scandinavian domain'
    proj_id = 'stere'
    proj = 'epsg:3995'
    lon_0 = 20.0
    lat_0 = 90.0

    # Corner points to be converted to wanted projection
    lon1 = -2.448425
    lat1 = 68.79139
    lon2 = 29.38635
    lat2 = 54.67893

    # Calculate coordinate points in projection
    p = Proj(init=proj)
    x1, y1 = p(lon1, lat1)
    x2, y2 = p(lon2, lat2)

    # Bug fix: Python-2 print statements converted to print() calls.
    print(x1, y1, x2, y2)
    print(abs(x1 - x2) / abs(y1 - y2))
    print(abs(y1 - y2) / abs(x1 - x2))

    x_size = 1000
    # Bug fix: grid sizes must be integers; the aspect-ratio product is a
    # float, so round it before handing it to AreaDefinition.
    y_size = int(round(abs(x1 - x2) / abs(y1 - y2) * 1000))

    area_extent = (x1, y1, x2, y2)
    proj_dict = {
        'a': '6371228.0',
        'units': 'm',
        'lon_0': lon_0,
        'proj': proj_id,
        'lat_0': lat_0
    }
    area_def = geometry.AreaDefinition(area_id, description, proj_id,
                                       proj_dict, x_size, y_size, area_extent)

    print(area_def)

    # Finland domain (kept for reference)
    # lon1=16.52893
    # lat1=70.34990
    # lon2=31.85138
    # lat2=58.76623

    # Resampling data: quick (approximate) and nearest-neighbour variants.
    laps_con_quick = image.ImageContainerQuick(data, grid_def)
    area_con_quick = laps_con_quick.resample(area_def)
    result_data_quick = area_con_quick.image_data
    laps_con_nn = image.ImageContainerNearest(data,
                                              grid_def,
                                              radius_of_influence=50000)
    area_con_nn = laps_con_nn.resample(area_def)
    result_data_nn = area_con_nn.image_data

    print(result_data_nn)
def write_results(date, enkf_c_dir, ens_out_dir, Nens, save_dir, obs_list):
    """Collect pre-/post-assimilation ensemble fields into a daily
    'Assim_summary_YYYYMMDD.nc' file, plus resampled observations.

    Parameters
    ----------
    date : datetime-like
        Analysis day; used for file names and the time coordinate.
    enkf_c_dir : str
        Root of the enkf-c run (ensemble files, obs, grid config).
    ens_out_dir : str
        Directory holding the post-assimilation ice/ocean restart files.
    Nens : int
        Ensemble size.
    save_dir : str
        Output directory for the summary file.
    obs_list : list of str
        Observation subdirectory names (e.g. 'AMSR', 'SMOS', 'MUR').
    """

    # Zero-pad month/day for the file name.
    smnd = str(date.month) if date.month > 9 else '0' + str(date.month)
    sday = str(date.day) if date.day > 9 else '0' + str(date.day)
    file = save_dir + 'Assim_summary_' + str(date.year) + smnd + sday + '.nc'

    # Generate the netcdf, shoudl contain aice, vice, before and after in addition,
    # mem1 aicen before and after and sst and vice
    # Can add more later on

    # Read in temp file and use this as template for the dimensions
    print(enkf_c_dir + 'ensemble_6565/mem001_temp.nc')
    tt = xr.open_dataset(enkf_c_dir + 'ensemble_6565/mem001_temp.nc')
    temp = tt['temp']

    print(file)
    ds = nc.Dataset(file, 'w', format='NETCDF4')

    time = ds.createDimension('time', None)
    times = ds.createVariable('time', 'f4', ('time', ))
    times[:] = nc.date2num(date,
                           units='days since 1990-01-01',
                           calendar='gregorian')
    times.units = 'days since 1990-01-01'
    times.calendar = 'gregorian'

    de = ds.createDimension('de', Nens)  # Ens
    di = ds.createDimension('di', 5)  # Ice categories
    dz = ds.createDimension('dz', temp.shape[1])  # Depth levels
    dx = ds.createDimension('dx', temp.shape[2])
    dy = ds.createDimension('dy', temp.shape[3])

    des = ds.createVariable('de', 'f4', ('de', ))
    dis = ds.createVariable('di', 'f4', ('di', ))
    dzs = ds.createVariable('dz', 'f4', ('dz', ))
    dxs = ds.createVariable('dx', 'f4', ('dx', ))
    dys = ds.createVariable('dy', 'f4', ('dy', ))

    #print(Nens)
    #print(np.arange(0, Nens, 1.0))
    des[:] = np.arange(0, Nens, 1.0)
    dis[:] = np.arange(0, 5, 1.0)
    dzs[:] = np.arange(0, temp.shape[1], 1.0)
    dxs[:] = np.arange(0, temp.shape[2], 1.0)
    dys[:] = np.arange(0, temp.shape[3], 1.0)

    tt.close()

    # Declare all output variables ('_inn' = before assimilation,
    # '_out' = after; '1' suffix = ensemble member 1 only).
    aicen_mem1_before = ds.createVariable('aicen1_inn', 'f4', (
        'time',
        'di',
        'dx',
        'dy',
    ))
    aicen_mem1_after = ds.createVariable('aicen1_out', 'f4', (
        'time',
        'di',
        'dx',
        'dy',
    ))

    vicen_mem1_before = ds.createVariable('vicen1_inn', 'f4', (
        'time',
        'di',
        'dx',
        'dy',
    ))
    vicen_mem1_after = ds.createVariable('vicen1_out', 'f4', (
        'time',
        'di',
        'dx',
        'dy',
    ))

    aice_before = ds.createVariable('aice_inn', 'f4', (
        'time',
        'de',
        'dx',
        'dy',
    ))
    aice_after = ds.createVariable('aice_out', 'f4', (
        'time',
        'de',
        'dx',
        'dy',
    ))

    vice_before = ds.createVariable('vice_inn', 'f4', (
        'time',
        'de',
        'dx',
        'dy',
    ))
    vice_after = ds.createVariable('vice_out', 'f4', (
        'time',
        'de',
        'dx',
        'dy',
    ))

    temp_mem1_before = ds.createVariable('temp1_inn', 'f4', (
        'time',
        'dz',
        'dx',
        'dy',
    ))
    temp_mem1_after = ds.createVariable('temp1_out', 'f4', (
        'time',
        'dz',
        'dx',
        'dy',
    ))

    sst_before = ds.createVariable('sst_inn', 'f4', (
        'time',
        'de',
        'dx',
        'dy',
    ))
    sst_after = ds.createVariable('sst_out', 'f4', (
        'time',
        'de',
        'dx',
        'dy',
    ))

    # One line per ensemble member; each line holds the member number.
    file_ens = open(enkf_c_dir + 'files_in_ensemble', 'r')
    Lines = file_ens.readlines()
    file_count = 0

    for ll in Lines:
        file_count += 1
        # Zero-padded member index; ll[0:-1] strips the trailing newline.
        sens = str(file_count) if file_count > 9 else '0' + str(file_count)
        file_out_ice = ens_out_dir + 'iced.' + str(
            date.year) + smnd + sday + '_' + ll[0:-1] + '.nc'
        file_out_ocn = ens_out_dir + 'ocean.' + str(
            date.year) + smnd + sday + '_' + ll[0:-1] + '.nc'

        ############ Write the inn first ###################
        # Write aice_inn to res
        file_inn = enkf_c_dir + 'ensemble_6565/mem0' + sens + '_aice.nc'
        handle = xr.open_dataset(file_inn)
        aice_before[0, int(ll[0:-1]) - 1, :, :] = handle['aice'][0, :, :]
        handle.close()

        # Write vice_inn to res
        file_inn = enkf_c_dir + 'ensemble_6565/mem0' + sens + '_vice.nc'
        handle = xr.open_dataset(file_inn)
        vice_before[0, int(ll[0:-1]) - 1, :, :] = handle['vice'][0, :, :]
        handle.close()

        # Write sst_inn to res
        file_inn = enkf_c_dir + 'ensemble_6565/mem0' + sens + '_sst.nc'
        handle = xr.open_dataset(file_inn)
        sst_before[0, int(ll[0:-1]) - 1, :, :] = handle['sst'][0, :, :]
        handle.close()

        # Write the full member 1 states
        if file_count == 1:
            file_inn = enkf_c_dir + 'ensemble_6565/mem0' + sens + '_aicen.nc'
            handle = xr.open_dataset(file_inn)
            aicen_mem1_before[0, :, :, :] = handle['aicen'][0, :, :, :]
            nx_size = handle['aicen'].shape[2]
            handle.close()

            file_inn = enkf_c_dir + 'ensemble_6565/mem0' + sens + '_vicen.nc'
            handle = xr.open_dataset(file_inn)
            vicen_mem1_before[0, :, :, :] = handle['vicen'][0, :, :, :]
            handle.close()

            file_inn = enkf_c_dir + 'ensemble_6565/mem0' + sens + '_temp.nc'
            handle = xr.open_dataset(file_inn)
            temp_mem1_before[0, :, :, :] = handle['temp'][0, :, :, :]
            handle.close()
        ###################################################################

        #####################   Write the out results #####################
        handle = xr.open_dataset(file_out_ocn)
        # NOTE(review): index 41 is presumably the surface (or a fixed)
        # model level used as SST -- confirm against the ocean model setup.
        sst_after[0, int(ll[0:-1]) - 1, :, :] = handle['temp'][0, 41, :, :]
        if file_count == 1:
            temp_mem1_after[0, :, :, :] = handle['temp'][0, :, :, :]
        handle.close()

        handle = xr.open_dataset(file_out_ice)
        # Detect whether the restart carries one halo cell on each edge by
        # comparing against the size recorded from member 1's input file.
        nx_size2 = handle['aicen'].shape[1]
        ice_halo_cells = True if nx_size2 > nx_size else False

        if ice_halo_cells:
            # Strip the halo ([1:-1]) and sum over categories for totals.
            aice_after[0,
                       int(ll[0:-1]) - 1, :, :] = np.sum(handle['aicen'][:,
                                                                         1:-1,
                                                                         1:-1],
                                                         axis=0)
            vice_after[0,
                       int(ll[0:-1]) - 1, :, :] = np.sum(handle['vicen'][:,
                                                                         1:-1,
                                                                         1:-1],
                                                         axis=0)
            if file_count == 1:
                aicen_mem1_after[0, :, :, :] = handle['aicen'][:, 1:-1, 1:-1]
                vicen_mem1_after[0, :, :, :] = handle['vicen'][:, 1:-1, 1:-1]
        else:
            aice_after[0, int(ll[0:-1]) - 1, :, :] = np.sum(
                handle['aicen'][:, :, :], axis=0)
            vice_after[0, int(ll[0:-1]) - 1, :, :] = np.sum(
                handle['vicen'][:, :, :], axis=0)
            if file_count == 1:
                aicen_mem1_after[0, :, :, :] = handle['aicen'][:, :, :]
                vicen_mem1_after[0, :, :, :] = handle['vicen'][:, :, :]

    ####################################################################

    ##################### Also write the observations for easy reference? #####
    # Might convert it first so it might be easier to compare? ################
    # With several observations potentially a list of string could be used here,
    #OSISAF
    file_osisaf = enkf_c_dir + 'obs/OSISAF/this_day.nc'
    file_amsr = enkf_c_dir + 'obs/AMSR/this_day.nc'
    grid_file = enkf_c_dir + 'conf/new_grid_ice.nc'

    Nens = ds.createDimension('Nens', None)
    Obs1 = ds.createVariable('Obs1', 'f4', (
        'time',
        'dx',
        'dy',
        'Nens',
    ))

    # Model grid used as the resampling target for all observations.
    handle2 = xr.open_dataset(grid_file)
    lon_mod = handle2['lon']
    lat_mod = handle2['lat']
    mod_grid_def = geometry.GridDefinition(lons=lon_mod, lats=lat_mod)

    i = -1
    for obs in obs_list:
        fileobs = enkf_c_dir + '/obs/' + obs + '/this_day.nc'
        if os.path.exists(fileobs):
            i += 1
            # Variable name depends on the observation platform.
            if obs == 'AMSR' or obs == 'SSMIS':
                varname = 'ice_conc'
            elif obs == 'SMOS' or obs == 'CRYO':
                varname = 'sit'
            elif obs == 'MUR':
                varname = 'sst'
            handle = xr.open_dataset(fileobs)
            ice_conc = handle[varname]
            lon_obs = handle['lon']
            lat_obs = handle['lat']
            obs_grid_def = geometry.GridDefinition(lons=lon_obs, lats=lat_obs)

            # Fix future warning!
            obs_container = image.ImageContainerNearest(
                ice_conc[0, :, :].values,
                obs_grid_def,
                radius_of_influence=20000)
            obs_modelgrid = obs_container.resample(mod_grid_def)
            res = obs_modelgrid.image_data

            Obs1[0, :, :, i] = res[:]

    ds.close()
from pyresample import geometry, image


# Script: build pyresample grid definitions for Stage IV observations and
# for 12-km/4-km WRF forecast domains.
date1 = datetime(2016, 7, 1, 15)
ref_date = datetime(2016, 1, 1, 0)
enddate = datetime(2016, 8, 2, 12)
outname_03h = '/lustre/scratch/twixtrom/ST4_201607_03h.nc'
outname_01h = '/lustre/scratch/twixtrom/ST4_201607_01h.nc'
dtype = 'f4'

print('Getting Obs Grid')
# Open a stageIV file and get out the grid
grib = pygrib.open('/lustre/scratch/twixtrom/stage4/ST4.2016010112.01h')
apcp = grib.read(1)[0]
obs_lat, obs_lon = apcp.latlons()
obs_grid = geometry.GridDefinition(lons=obs_lon, lats=obs_lat)

print('Getting forecast grids')
# Get the 12-km forecast grid (first time step's lat/lon fields)
fcst_12km = Dataset('/lustre/scratch/twixtrom/adaptive_wrf_post/control_thompson/2016010112/wrfprst_d01_2016010112.nc')
fcst_lat_12km = fcst_12km.variables['lat'][0,]
fcst_lon_12km = fcst_12km.variables['lon'][0,]
fcst_grid_12km = geometry.GridDefinition(lons=fcst_lon_12km, lats=fcst_lat_12km)

# Get the 4-km forecast grid
fcst_4km = Dataset('/lustre/scratch/twixtrom/adaptive_wrf_post/control_thompson/2016010112/wrfprst_d02_2016010112.nc')
fcst_lat_4km = fcst_4km.variables['lat'][0,]
fcst_lon_4km = fcst_4km.variables['lon'][0,]
fcst_grid_4km = geometry.GridDefinition(lons=fcst_lon_4km, lats=fcst_lat_4km)

def download_MUR(date, obs_dir, custom_grid_file=None):
    """Download a MUR L4 SST file for *date*, subset it, resample SST and
    its error onto the grid in *custom_grid_file*, and write
    'MUR_YYYYMMDD.nc' under obs_dir/MUR_new/.
    """
    # Because of size there is no option for storing the full original file
    # Same as for downloading the CRYO data there is a need for a username a earth data login
    # https://wiki.earthdata.nasa.gov/display/EL/How+To+Access+Data+With+cURL+And+Wget
    # Because the original MUR files are so large the only option currently is to get the observations
    # on the custom grid provided.
    cryo_usr = '******'  # NOTE(review): redacted wget credential flags
    mur_dir = obs_dir + '/MUR_new/'
    mur_postxt = '090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc'
    mur_pretxt = 'https://podaac-opendap.jpl.nasa.gov/opendap/allData/ghrsst/data/GDS2/L4/GLOB/JPL/MUR/v4.1/'
    dl = [date]
    mur_file = mur_dir + date.strftime('%Y%m%d') + mur_postxt
    # Download via wget; `cmd` presumably returns the shell exit status.
    res = cmd('wget -P ' + mur_dir + ' ' + cryo_usr + ' ' + mur_pretxt +
              date.strftime('%Y') + '/' + date.strftime('%j') + '/' +
              date.strftime('%Y%m%d') + mur_postxt)

    # If file exists
    if res == 0:
        # Subset the latitude range, then keep only SST and its error.
        cmd('ncks -O -d lat,13000,17998 ' + mur_file + ' ' + mur_file)
        cmd('ncks -O -v analysed_sst,analysis_error ' + mur_file + ' ' +
            mur_file)
        # Just make a new file as I need to convert the grid and also make new lon/lat files.
        DS = xr.open_dataset(mur_file)
        murlon = DS['lon']
        murlat = DS['lat']

        # Expand the 1-D lon/lat vectors into 2-D coordinate arrays.
        murlat2 = np.transpose(repmat(murlat.data, len(murlon), 1))
        murlon2 = repmat(murlon.data, len(murlat), 1)

        mur_grid_def = geometry.GridDefinition(lons=murlon2, lats=murlat2)

        sst = DS['analysed_sst']
        sst_err = DS['analysis_error']

        #Read custom grid
        DS_bar = xr.open_dataset(custom_grid_file)
        barlon = DS_bar['lon']
        barlat = DS_bar['lat']
        bar_grid_def = geometry.GridDefinition(lons=barlon, lats=barlat)

        # Convert to barents
        obs_container = image.ImageContainerNearest(sst.data[0, :, :],
                                                    mur_grid_def,
                                                    radius_of_influence=2000)
        obs_modelgrid = obs_container.resample(bar_grid_def)
        sst_bar = obs_modelgrid.image_data
        obs_container = image.ImageContainerNearest(sst_err.data[0, :, :],
                                                    mur_grid_def,
                                                    radius_of_influence=2000)
        obs_modelgrid = obs_container.resample(bar_grid_def)
        sst_err2 = obs_modelgrid.image_data
        # Sematics

        # Remove nan values
        # Kelvin -> Celsius, then sentinel values: -3 C for missing SST,
        # error std of 10 for missing error cells.
        sst_bar = sst_bar - 273.15
        sst_bar[np.isnan(sst_bar)] = -3
        sst_err2[np.isnan(sst_err2)] = 10
        # Write to file
        ds = xr.Dataset(
            {
                "sst": (('time', 'x', 'y'), np.expand_dims(sst_bar, axis=0)),
                "error_std":
                (('time', 'x', 'y'), np.expand_dims(sst_err2, axis=0)),
                "lon": (('x', 'y'), barlon),
                "lat": (('x', 'y'), barlat)
            },
            coords={
                'time': dl[0:1],
            },
        )
        ds.to_netcdf(mur_dir + 'MUR_' + date.strftime("%Y%m%d") + '.nc')
        # Remove the large intermediate download.
        cmd('rm ' + mur_file)
Example #19
0
def download_amsr_Sindre(date, wdir, enkf_c_dir, add_barents=True):
    """Download AMSR2 sea-ice concentration for *date*, estimate its
    uncertainty, and rewrite 'amsr2_YYYYMMDD.nc' in *wdir* (optionally
    with the fields resampled onto the model grid from enkf-c).

    Parameters
    ----------
    date : datetime-like
        Day to download.
    wdir : str
        Working directory for downloaded/produced files.
    enkf_c_dir : str
        enkf-c root; provides 'conf/new_grid_ice.nc' as the model grid.
    add_barents : bool
        If True, resample concentration and error onto the model grid and
        store them in the output file.
    """
    # date = datetime.datetime(2018,1,1)
    # wdir = '/home/sindremf/PHD2/Work/Test_assimiation/'
    # enkf_c_dir = '/home/sindremf/PHD2/Work/Assim_enkf-c/'

    grid_file = enkf_c_dir + 'conf/new_grid_ice.nc'

    # Download data with Keguangfunction
    damsr.amsr2_download(date.strftime('%Y%m%d'), wdir)

    handle = nc.Dataset(wdir + 'amsr2_' + date.strftime('%Y%m%d') + '.nc', 'r')

    # Build 2-D projection coordinate arrays from the 1-D axes.
    X0 = handle['x'][:]
    Y0 = handle['y'][:]
    X02 = npm.repmat(X0, len(Y0), 1)
    Y02 = npm.repmat(Y0, len(X0), 1).transpose()
    sic = handle['z'][:]

    # Get lon/lat from projection
    P = Proj('epsg:3411')
    lon2, lat2 = P(X02, Y02, inverse=True)

    # Concentration in percent -> fraction
    sic2 = sic * 0.01

    # estimate uncertainty (see Spreen et al., 2008)
    Psw, Epsw = 82.0, 4.0
    Psi, Epsi = 10.0, 4.0
    Tauw, Etauw = 0.27, 0.1
    Taui, Etaui = 0.14, 0.035
    #d3, d2, d1, d0 = 1.64e-5, -1.6e-3, 1.92e-2, 0.971
    d3, d2, d1, d0 = 5.587e-06, -5.218e-04, -1.226e-02, 1.116

    Ps = sic2 * Psi + (1 - sic2) * Psw
    Tau = sic2 * Taui + (1 - sic2) * Tauw
    Etau = sic2 * Etaui + (1 - sic2) * Etauw
    ac = 1.1 * np.exp(-2 * Tau) - 0.11 * np.exp(-Tau)
    P = Ps * ac

    Ep2 = (Ps*Etau*(0.11*np.exp(-Tau)-2.2*np.exp(-2*Tau)))**2 + \
          (ac*(1-sic2)*Epsw)**2 + (ac*sic2*Epsi)**2
    err2 = np.abs(3 * d3 * P**2 + 2 * d2 * P + d1) * np.sqrt(Ep2) * 100

    file = wdir + 'AMSR2Full-' + date.strftime('%Y%m%d') + '.nc'

    # Scale to decimal
    siconc = sic.data[:] / 100
    err3 = err2.data[:] / 100

    # Set nan values to -1
    err3[np.isnan(siconc)] = -1
    siconc[np.isnan(siconc)] = -1

    # Change water uncertainty to 0.05, it is way to big
    err3[siconc == 0] = 0.05

    # Create a new netcdf file; best-effort removal of a stale one.
    # Bug fix: narrowed the bare `except:` to OSError.
    try:
        os.remove(file)
    except OSError:
        pass

    ds = nc.Dataset(file, 'w', format='NETCDF4')

    time = ds.createDimension('time', None)
    times = ds.createVariable('time', 'f4', ('time', ))
    times[:] = nc.date2num(date,
                           units='days since 1990-01-01',
                           calendar='gregorian')
    times.units = 'days since 1990-01-01'
    times.calendar = 'gregorian'

    if add_barents:

        # Load the barents grid
        handle2 = xr.open_dataset(grid_file)
        lon_mod = handle2['lon']
        lat_mod = handle2['lat']
        mod_grid_def = geometry.GridDefinition(lons=lon_mod, lats=lat_mod)

        dx2 = ds.createDimension('dx2', lon_mod.shape[0])
        dy2 = ds.createDimension('dy2', lon_mod.shape[1])

        lon4 = ds.createVariable('lon', 'f4', (
            'dx2',
            'dy2',
        ))
        lat4 = ds.createVariable('lat', 'f4', (
            'dx2',
            'dy2',
        ))

        lon4[:] = lon_mod
        lat4[:] = lat_mod

        obs_grid_def = geometry.GridDefinition(lons=lon2, lats=lat2)

        # Convert sicconc to barents
        obs_container = image.ImageContainerNearest(
            siconc, obs_grid_def, radius_of_influence=200000000)
        obs_modelgrid = obs_container.resample(mod_grid_def)
        siconc_bar = obs_modelgrid.image_data

        # Convert error to barents
        obs_container = image.ImageContainerNearest(
            err3, obs_grid_def, radius_of_influence=200000000)
        obs_modelgrid = obs_container.resample(mod_grid_def)
        err3_bar = obs_modelgrid.image_data

        bar = ds.createVariable('ice_conc', 'f4', (
            'time',
            'dx2',
            'dy2',
        ))
        Err_bar = ds.createVariable('error_std', 'f4', (
            'time',
            'dx2',
            'dy2',
        ))

        bar[0, :, :] = siconc_bar
        Err_bar[0, :, :] = err3_bar

        handle2.close()

    # Bug fix: these closes previously ran only when add_barents was True,
    # leaving the output dataset unflushed and the input handle leaked
    # otherwise.  Close unconditionally before renaming.
    ds.close()
    handle.close()

    os.remove(wdir + 'amsr2_' + date.strftime('%Y%m%d') + '.nc')
    os.rename(file, wdir + 'amsr2_' + date.strftime('%Y%m%d') + '.nc')