def test_fieldset_from_file_subsets(indslon,
                                    indslat,
                                    tmpdir,
                                    filename='test_subsets'):
    """ Test for subsetting fieldset from file using indices dict. """
    data, dimensions = generate_fieldset(100, 100)
    filepath = tmpdir.join(filename)
    fieldsetfull = FieldSet.from_data(data, dimensions)
    fieldsetfull.write(filepath)
    indices = {'lon': indslon, 'lat': indslat}
    fieldsetsub = FieldSet.from_parcels(filepath, indices=indices)
    assert np.allclose(fieldsetsub.U.lon,
                       fieldsetfull.U.grid.lon[indices['lon']])
    assert np.allclose(fieldsetsub.U.lat,
                       fieldsetfull.U.grid.lat[indices['lat']])
    assert np.allclose(fieldsetsub.V.lon,
                       fieldsetfull.V.grid.lon[indices['lon']])
    assert np.allclose(fieldsetsub.V.lat,
                       fieldsetfull.V.grid.lat[indices['lat']])

    ixgrid = np.ix_([0], indices['lat'], indices['lon'])
    assert np.allclose(fieldsetsub.U.data, fieldsetfull.U.data[ixgrid])
    assert np.allclose(fieldsetsub.V.data, fieldsetfull.V.data[ixgrid])
Example #2
def test_fieldset_write_curvilinear(tmpdir):
    fname = path.join(path.dirname(__file__), 'test_data',
                      'mask_nemo_cross_180lon.nc')
    filenames = {'dx': fname, 'mesh_mask': fname}
    variables = {'dx': 'e1u'}
    dimensions = {'lon': 'glamu', 'lat': 'gphiu'}
    fieldset = FieldSet.from_nemo(filenames, variables, dimensions)
    assert fieldset.dx.creation_log == 'from_nemo'

    newfile = tmpdir.join('curv_field')
    fieldset.write(newfile)

    fieldset2 = FieldSet.from_netcdf(filenames=newfile + 'dx.nc',
                                     variables={'dx': 'dx'},
                                     dimensions={
                                         'lon': 'nav_lon',
                                         'lat': 'nav_lat'
                                     })
    assert fieldset2.dx.creation_log == 'from_netcdf'

    for var in ['lon', 'lat', 'data']:
        assert np.allclose(getattr(fieldset2.dx, var),
                           getattr(fieldset.dx, var))
Example #3
def test_inversedistance_nearland(pset_mode,
                                  mode,
                                  arrtype,
                                  k_sample_p,
                                  npart=81):
    dims = (4, 4, 6)
    P = (np.random.rand(dims[0], dims[1], dims[2]) + 2
         if arrtype == 'rand'
         else np.ones(dims, dtype=np.float32))
    P[1, 1:2, 1:6] = np.nan  # setting some values to land (NaN)
    dimensions = {
        'lon': np.linspace(0., 1., dims[2], dtype=np.float32),
        'lat': np.linspace(0., 1., dims[1], dtype=np.float32),
        'depth': np.linspace(0., 1., dims[0], dtype=np.float32)
    }
    data = {
        'U': np.zeros(dims, dtype=np.float32),
        'V': np.zeros(dims, dtype=np.float32),
        'P': P
    }
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat')
    fieldset.P.interp_method = 'linear_invdist_land_tracer'

    xv, yv = np.meshgrid(np.linspace(0.1, 0.9, int(np.sqrt(npart))),
                         np.linspace(0.1, 0.9, int(np.sqrt(npart))))
    # combine a pset at 0m with pset at 1m, as meshgrid does not do 3D
    pset = pset_type[pset_mode]['pset'](fieldset,
                                        pclass=pclass(mode),
                                        lon=xv.flatten(),
                                        lat=yv.flatten(),
                                        depth=np.zeros(npart))
    pset2 = pset_type[pset_mode]['pset'](fieldset,
                                         pclass=pclass(mode),
                                         lon=xv.flatten(),
                                         lat=yv.flatten(),
                                         depth=np.ones(npart))
    pset.add(pset2)
    pset.execute(k_sample_p, endtime=1, dt=1)
    if arrtype == 'rand':
        assert np.all((pset.p > 2) & (pset.p < 3))
    else:
        assert np.allclose(pset.p, 1.0, rtol=1e-5)

    success = False
    try:
        fieldset.U.interp_method = 'linear_invdist_land_tracer'
        fieldset.check_complete()
    except NotImplementedError:
        success = True
    assert success
Example #4
def fieldset_from_globcurrent(chunk_mode):
    filenames = path.join(path.dirname(__file__), 'GlobCurrent_example_data',
                          '200201*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc')
    variables = {'U': 'eastward_eulerian_current_velocity', 'V': 'northward_eulerian_current_velocity'}
    dimensions = {'lat': 'lat', 'lon': 'lon', 'time': 'time'}
    chs = False
    if chunk_mode == 'auto':
        chs = 'auto'
    elif chunk_mode == 'specific':
        chs = {'U': {'lat': 16, 'lon': 16},
               'V': {'lat': 16, 'lon': 16}}

    fieldset = FieldSet.from_netcdf(filenames, variables, dimensions, field_chunksize=chs)
    return fieldset
Example #5
def test_fieldset_diffgrids_from_file_data(tmpdir, filename='test_subsets'):
    """ Test that a field created from data and added to a fieldset read from file keeps its own grid. """
    data, dimensions = generate_fieldset(100, 100)
    filepath = tmpdir.join(filename)
    fieldset_data = FieldSet.from_data(data, dimensions)
    fieldset_data.write(filepath)
    field_data = fieldset_data.U
    field_data.name = "B"

    ufiles = [filepath+'U.nc', ] * 4
    vfiles = [filepath+'V.nc', ] * 4
    timestamps = np.arange(0, 4, 1) * 86400.0
    timestamps = np.expand_dims(timestamps, 1)
    files = {'U': ufiles, 'V': vfiles}
    variables = {'U': 'vozocrtx', 'V': 'vomecrty'}
    dimensions = {'lon': 'nav_lon', 'lat': 'nav_lat'}
    chs = 'auto'
    fieldset_file = FieldSet.from_netcdf(files, variables, dimensions, timestamps=timestamps, allow_time_extrapolation=True, field_chunksize=chs)

    fieldset_file.add_field(field_data, "B")
    assert len(fieldset_file.get_fields()) == 3
    assert fieldset_file.gridset.size == 2
    assert fieldset_file.U.grid != fieldset_file.B.grid
Example #6
def test_fieldset_celledgesizes_curvilinear(dx, dy):
    fname = path.join(path.dirname(__file__), 'test_data', 'mask_nemo_cross_180lon.nc')
    filenames = {'dx': fname, 'dy': fname, 'mesh_mask': fname}
    variables = {'dx': dx, 'dy': dy}
    dimensions = {'dx': {'lon': 'glamu', 'lat': 'gphiu'},
                  'dy': {'lon': 'glamu', 'lat': 'gphiu'}}
    fieldset = FieldSet.from_nemo(filenames, variables, dimensions)

    # explicitly setting cell_edge_sizes from e1u and e2u etc
    fieldset.dx.grid.cell_edge_sizes['x'] = fieldset.dx.data
    fieldset.dx.grid.cell_edge_sizes['y'] = fieldset.dy.data

    A = fieldset.dx.cell_areas()
    assert np.allclose(A, fieldset.dx.data * fieldset.dy.data)
Example #7
def test_fieldset_samegrids_from_file(tmpdir, chunksize, filename='test_subsets'):
    """ Test whether U and V read from the same files share a grid, depending on the chunking mode. """
    data, dimensions = generate_fieldset(100, 100)
    filepath1 = tmpdir.join(filename+'_1')
    fieldset1 = FieldSet.from_data(data, dimensions)
    fieldset1.write(filepath1)

    ufiles = [filepath1+'U.nc', ] * 4
    vfiles = [filepath1+'V.nc', ] * 4
    timestamps = np.arange(0, 4, 1) * 86400.0
    timestamps = np.expand_dims(timestamps, 1)
    files = {'U': ufiles, 'V': vfiles}
    variables = {'U': 'vozocrtx', 'V': 'vomecrty'}
    dimensions = {'lon': 'nav_lon', 'lat': 'nav_lat'}
    fieldset = FieldSet.from_netcdf(files, variables, dimensions, timestamps=timestamps, allow_time_extrapolation=True, chunksize=chunksize)

    if chunksize == 'auto':
        assert fieldset.gridset.size == 2
        assert fieldset.U.grid != fieldset.V.grid
    else:
        assert fieldset.gridset.size == 1
        assert fieldset.U.grid == fieldset.V.grid
        assert fieldset.U.chunksize == fieldset.V.chunksize
Example #8
def stommel_fieldset(xdim=200, ydim=200):
    """Simulate a periodic current along a western boundary, with significantly
    larger velocities along the western edge than the rest of the region

    The original test description can be found in: N. Fabbroni, 2009,
    Numerical Simulation of Passive tracers dispersion in the sea,
    Ph.D. dissertation, University of Bologna
    http://amsdottorato.unibo.it/1733/1/Fabbroni_Nicoletta_Tesi.pdf
    """
    # Set NEMO fieldset variables
    depth = np.zeros(1, dtype=np.float32)
    time = np.linspace(0., 100000. * 86400., 2, dtype=np.float64)

    # Some constants
    A = 100
    eps = 0.05
    a = 10000
    b = 10000

    # Coordinates of the test fieldset (on A-grid in deg)
    lon = np.linspace(0, a, xdim, dtype=np.float32)
    lat = np.linspace(0, b, ydim, dtype=np.float32)

    # Define arrays U (zonal), V (meridional), W (vertical) and P (sea
    # surface height) all on A-grid
    U = np.zeros((lon.size, lat.size, time.size), dtype=np.float32)
    V = np.zeros((lon.size, lat.size, time.size), dtype=np.float32)
    P = np.zeros((lon.size, lat.size, time.size), dtype=np.float32)

    [x, y] = np.mgrid[:lon.size, :lat.size]
    l1 = (-1 + math.sqrt(1 + 4 * math.pi**2 * eps**2)) / (2 * eps)
    l2 = (-1 - math.sqrt(1 + 4 * math.pi**2 * eps**2)) / (2 * eps)
    c1 = (1 - math.exp(l2)) / (math.exp(l2) - math.exp(l1))
    c2 = -(1 + c1)
    for t in range(time.size):
        for i in range(lon.size):
            for j in range(lat.size):
                xi = lon[i] / a
                yi = lat[j] / b
                P[i, j, t] = A * (c1*math.exp(l1*xi) + c2*math.exp(l2*xi) + 1) * math.sin(math.pi * yi)
        for i in range(lon.size-2):
            for j in range(lat.size):
                V[i+1, j, t] = (P[i+2, j, t] - P[i, j, t]) / (2 * a / xdim)
        for i in range(lon.size):
            for j in range(lat.size-2):
                U[i, j+1, t] = -(P[i, j+2, t] - P[i, j, t]) / (2 * b / ydim)

    data = {'U': U, 'V': V, 'P': P}
    dimensions = {'lon': lon, 'lat': lat, 'depth': depth, 'time': time}
    return FieldSet.from_data(data, dimensions, mesh='flat')
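A minimal usage sketch (assumed, following the pattern of the other examples in this collection): build the gyre fieldset above and advect a single particle with the standard RK4 kernel.

# Usage sketch (assumed values): one particle advected in the Stommel gyre defined above.
fieldset = stommel_fieldset()
pset = ParticleSet(fieldset, pclass=JITParticle, lon=[100.], lat=[5000.])
pset.execute(AdvectionRK4, runtime=delta(days=50), dt=delta(hours=1))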
Example #9
def brownian_fieldset(xdim=200, ydim=200):
    """Define a flat fieldset of zeros, for simplicity."""
    dimensions = {
        'lon': np.linspace(0, 600000, xdim, dtype=np.float32),
        'lat': np.linspace(0, 600000, ydim, dtype=np.float32)
    }

    data = {
        'U': np.zeros((xdim, ydim), dtype=np.float32),
        'V': np.zeros((xdim, ydim), dtype=np.float32)
    }

    return FieldSet.from_data(data, dimensions, mesh='flat')
Example #10
def test_advection_zonal(lon, lat, depth, mode, npart=10):
    """ Particles at high latitude move geographically faster due to
        the pole correction in `GeographicPolar`.
    """
    data2D = {
        'U': np.ones((lon.size, lat.size), dtype=np.float32),
        'V': np.zeros((lon.size, lat.size), dtype=np.float32)
    }
    data3D = {
        'U': np.ones((lon.size, lat.size, depth.size), dtype=np.float32),
        'V': np.zeros((lon.size, lat.size, depth.size), dtype=np.float32)
    }
    dimensions = {'lon': lon, 'lat': lat}
    fieldset2D = FieldSet.from_data(data2D,
                                    dimensions,
                                    mesh='spherical',
                                    transpose=True)

    pset2D = ParticleSet(fieldset2D,
                         pclass=ptype[mode],
                         lon=np.zeros(npart, dtype=np.float32) + 20.,
                         lat=np.linspace(0, 80, npart, dtype=np.float32))
    pset2D.execute(AdvectionRK4, runtime=delta(hours=2), dt=delta(seconds=30))
    assert (np.diff(np.array([p.lon for p in pset2D])) > 1.e-4).all()

    dimensions['depth'] = depth
    fieldset3D = FieldSet.from_data(data3D,
                                    dimensions,
                                    mesh='spherical',
                                    transpose=True)
    pset3D = ParticleSet(fieldset3D,
                         pclass=ptype[mode],
                         lon=np.zeros(npart, dtype=np.float32) + 20.,
                         lat=np.linspace(0, 80, npart, dtype=np.float32),
                         depth=np.zeros(npart, dtype=np.float32) + 10.)
    pset3D.execute(AdvectionRK4, runtime=delta(hours=2), dt=delta(seconds=30))
    assert (np.diff(np.array([p.lon for p in pset3D])) > 1.e-4).all()
Example #11
def test_fieldset_from_xarray(tdim):
    def generate_dataset(xdim, ydim, zdim=1, tdim=1):
        lon = np.linspace(0., 12, xdim, dtype=np.float32)
        lat = np.linspace(0., 12, ydim, dtype=np.float32)
        depth = np.linspace(0., 20., zdim, dtype=np.float32)
        if tdim:
            time = np.linspace(0., 10, tdim, dtype=np.float64)
            Uxr = np.ones((tdim, zdim, ydim, xdim), dtype=np.float32)
            Vxr = np.ones((tdim, zdim, ydim, xdim), dtype=np.float32)
            for t in range(Uxr.shape[0]):
                Uxr[t, :, :, :] = t / 10.
            coords = {'lat': lat, 'lon': lon, 'depth': depth, 'time': time}
            dims = ('time', 'depth', 'lat', 'lon')
        else:
            Uxr = np.ones((zdim, ydim, xdim), dtype=np.float32)
            Vxr = np.ones((zdim, ydim, xdim), dtype=np.float32)
            for z in range(Uxr.shape[0]):
                Uxr[z, :, :] = z / 2.
            coords = {'lat': lat, 'lon': lon, 'depth': depth}
            dims = ('depth', 'lat', 'lon')
        return xr.Dataset({
            'Uxr': xr.DataArray(Uxr, coords=coords, dims=dims),
            'Vxr': xr.DataArray(Vxr, coords=coords, dims=dims)
        })

    ds = generate_dataset(3, 3, 2, tdim)
    variables = {'U': 'Uxr', 'V': 'Vxr'}
    if tdim:
        dimensions = {
            'lat': 'lat',
            'lon': 'lon',
            'depth': 'depth',
            'time': 'time'
        }
    else:
        dimensions = {'lat': 'lat', 'lon': 'lon', 'depth': 'depth'}
    fieldset = FieldSet.from_xarray_dataset(ds,
                                            variables,
                                            dimensions,
                                            mesh='flat')
    assert fieldset.U.creation_log == 'from_xarray_dataset'

    pset = ParticleSet(fieldset, JITParticle, 0, 0, depth=20)

    pset.execute(AdvectionRK4, dt=1, runtime=10)
    if tdim == 10:
        assert np.allclose(pset.lon[0], 4.5) and np.allclose(pset.lat[0], 10)
    else:
        assert np.allclose(pset.lon[0], 5.0) and np.allclose(pset.lat[0], 10)
Example #12
def test_EOSseawaterproperties_kernels(pset_mode, mode):
    fieldset = FieldSet.from_data(data={
        'U': 0,
        'V': 0,
        'psu_salinity': 40,
        'temperature': 40,
        'potemperature': 36.89073
    },
                                  dimensions={
                                      'lat': 0,
                                      'lon': 0,
                                      'depth': 0
                                  })
    fieldset.add_constant('refpressure', 0.)

    class PoTempParticle(ptype[mode]):
        potemp = Variable('potemp', dtype=np.float32)
        pressure = Variable('pressure', dtype=np.float32, initial=10000)

    pset = pset_type[pset_mode]['pset'](fieldset,
                                        pclass=PoTempParticle,
                                        lon=5,
                                        lat=5,
                                        depth=1000)
    pset.execute(PtempFromTemp, runtime=0, dt=0)
    assert np.allclose(pset[0].potemp, 36.89073)

    class TempParticle(ptype[mode]):
        temp = Variable('temp', dtype=np.float32)
        pressure = Variable('pressure', dtype=np.float32, initial=10000)

    pset = pset_type[pset_mode]['pset'](fieldset,
                                        pclass=TempParticle,
                                        lon=5,
                                        lat=5,
                                        depth=1000)
    pset.execute(TempFromPtemp, runtime=0, dt=0)
    assert np.allclose(pset[0].temp, 40)

    class TPressureParticle(ptype[mode]):
        pressure = Variable('pressure', dtype=np.float32)

    pset = pset_type[pset_mode]['pset'](fieldset,
                                        pclass=TPressureParticle,
                                        lon=5,
                                        lat=30,
                                        depth=7321.45)
    pset.execute(PressureFromLatDepth, runtime=0, dt=0)
    assert np.allclose(pset[0].pressure, 7500, atol=1e-2)
Example #13
def simulate_parcels(source_url, output_filename, lat, lon,
                     wind_drift_factor=0.0,
                     velocity_average=True, duration=23):
    """
    source_url: local file or list of local files with field data in NetCDF format
    output_filename: name of file in which to save the calculated trajectory
    lat, lon: initial coordinates of a single drifter, or lists with coordinates for multiple drifters
    wind_drift_factor: fraction of the wind speed at which objects are advected. Default is 0 (no direct wind drift)
    velocity_average: Boolean deciding whether averaged horizontal velocities or surface velocities are used.
                      Default is averaged velocities, which is consistent with GPU Ocean
    duration: duration of the simulation in hours. Default is 23 hours.
    TODO: Add functionality to start drifters at a later time in the simulation, as in GPU Ocean.
    """
    filenames = {'U': source_url, 'V': source_url}
    dimensions = {'lat': 'lat', 'lon': 'lon', 'time': 'time'}

    if velocity_average:
        variables = {'U': 'ubar', 'V': 'vbar'}
    else:
        variables = {'U': 'u', 'V': 'v'}

    fieldset = FieldSet.from_netcdf(filenames, variables, dimensions, interp_method='cgrid_velocity')

    if wind_drift_factor:
        Uwind = Field.from_netcdf(source_url, ('U', 'Uwind'), dimensions, field_chunksize='auto', interp_method='cgrid_velocity')
        Vwind = Field.from_netcdf(source_url, ('V', 'Vwind'), dimensions, field_chunksize='auto', interp_method='cgrid_velocity')
        Uwind.set_scaling_factor(wind_drift_factor)
        Vwind.set_scaling_factor(wind_drift_factor)
        fieldset = FieldSet(U=fieldset.U + Uwind, V=fieldset.V + Vwind)

    pset = ParticleSet.from_list(fieldset=fieldset, pclass=JITParticle, lon=lon, lat=lat)
    output_file = pset.ParticleFile(name=output_filename, outputdt=timedelta(minutes=5))

    pset.execute(AdvectionRK4, runtime=timedelta(hours=duration), dt=timedelta(minutes=5), output_file=output_file)

    output_file.export()
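A minimal call sketch for the function above; the NetCDF file name, output name, and drifter position below are hypothetical placeholders.

# Hypothetical example: one drifter, depth-averaged velocities, a small wind-drift factor.
simulate_parcels(source_url='norkyst800_subset.nc',
                 output_filename='drifter_trajectory.nc',
                 lat=[60.0], lon=[4.5],
                 wind_drift_factor=0.02)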
Example #14
def test_nearest_neighbour_interpolation2D(pset_mode, mode, k_sample_p, npart=81):
    dims = (2, 2)
    dimensions = {'lon': np.linspace(0., 1., dims[0], dtype=np.float32),
                  'lat': np.linspace(0., 1., dims[1], dtype=np.float32)}
    data = {'U': np.zeros(dims, dtype=np.float32),
            'V': np.zeros(dims, dtype=np.float32),
            'P': np.zeros(dims, dtype=np.float32)}
    data['P'][0, 1] = 1.
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat', transpose=True)
    fieldset.P.interp_method = 'nearest'
    xv, yv = np.meshgrid(np.linspace(0., 1.0, int(np.sqrt(npart))), np.linspace(0., 1.0, int(np.sqrt(npart))))
    pset = pset_type[pset_mode]['pset'](fieldset, pclass=pclass(mode), lon=xv.flatten(), lat=yv.flatten())
    pset.execute(k_sample_p, endtime=1, dt=1)
    assert np.allclose(pset.p[(pset.lon < 0.5) & (pset.lat > 0.5)], 1.0, rtol=1e-5)
    assert np.allclose(pset.p[(pset.lon > 0.5) | (pset.lat < 0.5)], 0.0, rtol=1e-5)
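The k_sample_p argument used in this test and in the inverse-distance test above is a sampling kernel supplied by the test fixtures; a minimal sketch of such a kernel (assumed, with p declared as a float Variable on the particle class):

def k_sample_p(particle, fieldset, time):
    # Assumed sampling kernel: record the tracer field P at the particle position.
    particle.p = fieldset.P[time, particle.depth, particle.lat, particle.lon]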
Example #15
def test_from_netcdf_chunking(mode, time_periodic, chunksize, deferLoad):
    fnameU = path.join(path.dirname(__file__), 'test_data', 'perlinfieldsU.nc')
    fnameV = path.join(path.dirname(__file__), 'test_data', 'perlinfieldsV.nc')
    ufiles = [fnameU, ] * 4
    vfiles = [fnameV, ] * 4
    timestamps = np.arange(0, 4, 1) * 86400.0
    timestamps = np.expand_dims(timestamps, 1)
    files = {'U': ufiles, 'V': vfiles}
    variables = {'U': 'vozocrtx', 'V': 'vomecrty'}
    dimensions = {'lon': 'nav_lon', 'lat': 'nav_lat'}

    fieldset = FieldSet.from_netcdf(files, variables, dimensions, timestamps=timestamps, time_periodic=time_periodic, deferred_load=deferLoad, allow_time_extrapolation=True if time_periodic in [False, None] else False, chunksize=chunksize)
    pset = ParticleSet.from_line(fieldset, size=1, pclass=ptype[mode],
                                 start=(0.5, 0.5), finish=(0.5, 0.5))
    pset.execute(AdvectionRK4, dt=1, runtime=1)
Example #16
def test_advect_nemo(mode):
    data_path = path.join(path.dirname(__file__), 'test_data/')

    filenames = {'U': data_path + 'Uu_eastward_nemo_cross_180lon.nc',
                 'V': data_path + 'Vv_eastward_nemo_cross_180lon.nc',
                 'mesh_mask': data_path + 'mask_nemo_cross_180lon.nc'}
    variables = {'U': 'U', 'V': 'V'}
    dimensions = {'lon': 'glamf', 'lat': 'gphif'}
    field_set = FieldSet.from_nemo(filenames, variables, dimensions)

    lonp = 175.5
    latp = 81.5
    pset = ParticleSet.from_list(field_set, ptype[mode], lon=[lonp], lat=[latp])
    pset.execute(AdvectionRK4, runtime=delta(days=2), dt=delta(hours=6))
    assert abs(pset[0].lat - latp) < 1e-3
Example #17
def test_peninsula_file(mode, mesh, tmpdir):
    """Open fieldset files and execute"""
    gc.collect()
    fieldset = FieldSet.from_parcels(fieldsetfile(mesh, tmpdir),
                                     extra_fields={'P': 'P'},
                                     allow_time_extrapolation=True)
    outfile = tmpdir.join("Peninsula")
    pset = peninsula_example(fieldset, outfile, 5, mode=mode, degree=1)
    # Test advection accuracy by comparing streamline values
    err_adv = np.array([abs(p.p_start - p.p) for p in pset])
    assert (err_adv <= 1.e-3).all()
    # Test Field sampling accuracy by comparing kernel against Field sampling
    err_smpl = np.array(
        [abs(p.p - pset.fieldset.P[0., p.depth, p.lat, p.lon]) for p in pset])
    assert (err_smpl <= 1.e-3).all()
Example #18
def generate_testfieldset(xdim, ydim, zdim, tdim):
    lon = np.linspace(0., 2., xdim, dtype=np.float32)
    lat = np.linspace(0., 1., ydim, dtype=np.float32)
    depth = np.linspace(0., 0.5, zdim, dtype=np.float32)
    time = np.linspace(0., tdim, tdim, dtype=np.float64)
    U = np.ones((xdim, ydim, zdim, tdim), dtype=np.float32)
    V = np.zeros((xdim, ydim, zdim, tdim), dtype=np.float32)
    P = 2. * np.ones((xdim, ydim, zdim, tdim), dtype=np.float32)
    data = {'U': U, 'V': V, 'P': P}
    dimensions = {'lon': lon, 'lat': lat, 'depth': depth, 'time': time}
    fieldset = FieldSet.from_data(data,
                                  dimensions,
                                  mesh='flat',
                                  transpose=True)
    fieldset.write('testfields')
Example #19
def test_fieldset_defer_loading_with_diff_time_origin(
        tmpdir, fail, filename='test_parcels_defer_loading'):
    filepath = tmpdir.join(filename)
    data0, dims0 = generate_fieldset(10, 10, 1, 10)
    dims0['time'] = np.arange(0, 10, 1) * 3600
    fieldset_out = FieldSet.from_data(data0, dims0)
    fieldset_out.U.grid.time_origin = TimeConverter(
        np.datetime64('2018-04-20'))
    fieldset_out.V.grid.time_origin = TimeConverter(
        np.datetime64('2018-04-20'))
    data1, dims1 = generate_fieldset(10, 10, 1, 10)
    if fail:
        dims1['time'] = np.arange(0, 10, 1) * 3600
        Wtime_origin = TimeConverter(np.datetime64('2018-04-22'))
    else:
        dims1['time'] = np.arange(0, 10, 1) * 1800 + (24 + 25) * 3600
        Wtime_origin = TimeConverter(np.datetime64('2018-04-18'))
    gridW = RectilinearZGrid(dims1['lon'],
                             dims1['lat'],
                             dims1['depth'],
                             dims1['time'],
                             time_origin=Wtime_origin)
    fieldW = Field('W', np.zeros(data1['U'].shape), grid=gridW)
    fieldset_out.add_field(fieldW)
    fieldset_out.write(filepath)
    fieldset = FieldSet.from_parcels(filepath, extra_fields={'W': 'W'})
    assert fieldset.U.creation_log == 'from_parcels'
    pset = ParticleSet.from_list(fieldset,
                                 pclass=JITParticle,
                                 lon=[0.5],
                                 lat=[0.5],
                                 depth=[0.5],
                                 time=[datetime.datetime(2018, 4, 20, 1)])
    pset.execute(AdvectionRK4_3D, runtime=delta(hours=4), dt=delta(hours=1))
Example #20
def set_globcurrent_fieldset(filename=None, indices=None, full_load=False):
    if filename is None:
        filename = path.join(
            path.dirname(__file__), 'GlobCurrent_example_data',
            '20*-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc')
    variables = {
        'U': 'eastward_eulerian_current_velocity',
        'V': 'northward_eulerian_current_velocity'
    }
    dimensions = {'lat': 'lat', 'lon': 'lon', 'time': 'time'}
    return FieldSet.from_netcdf(filename,
                                variables,
                                dimensions,
                                indices,
                                full_load=full_load)
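A usage sketch (assumed): load only a subregion of the GlobCurrent data by passing an indices dict, mirroring the subsetting examples earlier in this collection; the index ranges below are arbitrary.

# Hypothetical subset: the first 100 longitude and 50 latitude indices.
fieldset = set_globcurrent_fieldset(indices={'lon': list(range(0, 100)), 'lat': list(range(0, 50))})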
Example #21
def test_advection_meridional(lon, lat, mode, npart=10):
    """ Particles at high latitude move geographically faster due to
        the pole correction in `GeographicPolar`.
    """
    data = {'U': np.zeros((lon.size, lat.size), dtype=np.float32),
            'V': np.ones((lon.size, lat.size), dtype=np.float32)}
    dimensions = {'lon': lon, 'lat': lat}
    fieldset = FieldSet.from_data(data, dimensions, mesh='spherical', transpose=True)

    pset = ParticleSet(fieldset, pclass=ptype[mode],
                       lon=np.linspace(-60, 60, npart, dtype=np.float32),
                       lat=np.linspace(0, 30, npart, dtype=np.float32))
    delta_lat = np.diff(np.array([p.lat for p in pset]))
    pset.execute(AdvectionRK4, runtime=delta(hours=2), dt=delta(seconds=30))
    assert np.allclose(np.diff(np.array([p.lat for p in pset])), delta_lat, rtol=1.e-4)
Example #22
def fieldset_geometric_polar(xdim=200, ydim=100):
    """ Standard earth fieldset with U and V equivalent to lon/lat in m
        and the inversion of the pole correction applied to U.
    """
    lon = np.linspace(-180, 180, xdim, dtype=np.float32)
    lat = np.linspace(-90, 90, ydim, dtype=np.float32)
    U, V = np.meshgrid(lat, lon)
    # Apply inverse of pole correction to U
    for i, y in enumerate(lat):
        U[:, i] *= cos(y * pi / 180)
    U *= 1000. * 1.852 * 60.
    V *= 1000. * 1.852 * 60.
    data = {'U': U, 'V': V}
    dimensions = {'lon': lon, 'lat': lat}
    return FieldSet.from_data(data, dimensions, mesh='spherical', transpose=True)
Example #23
def periodicfields(xdim, ydim, uvel, vvel):
    dimensions = {
        # don't include both 0 and 1, for periodic b.c.
        'lon': np.linspace(0., 1., xdim + 1, dtype=np.float32)[1:],
        'lat': np.linspace(0., 1., ydim + 1, dtype=np.float32)[1:]
    }

    data = {
        'U': uvel * np.ones((xdim, ydim), dtype=np.float32),
        'V': vvel * np.ones((xdim, ydim), dtype=np.float32)
    }
    return FieldSet.from_data(data,
                              dimensions,
                              mesh='spherical',
                              transpose=True)
Example #24
def timeoscillation_fieldset(xdim, ydim):
    time = np.arange(0., 4. * 86400., 60. * 5., dtype=np.float64)
    lon = np.linspace(-20000, 20000, xdim, dtype=np.float32)
    lat = np.linspace(0, 40000, ydim, dtype=np.float32)

    U = np.zeros((time.size, lat.size, lon.size), dtype=np.float32)
    V = np.zeros((time.size, lat.size, lon.size), dtype=np.float32)

    # A (amplitude) and omega (oscillation frequency) are module-level constants
    # of the original script and are not shown in this excerpt.
    for t in range(time.size):
        U[t, :, :] = A * np.cos(omega * time[t])
        V[t, :, :] = A

    data = {'U': U, 'V': V}
    dimensions = {'lon': lon, 'lat': lat, 'time': time}
    return FieldSet.from_data(data, dimensions, mesh='flat')
Example #25
def main(gc_dir, output_file, num_paths, runtime, dt):
    filepaths = "%s/*.nc" % gc_dir
    filenames = {'U': filepaths, 'V': filepaths}
    variables = {
        'U': 'eastward_eulerian_current_velocity',
        'V': 'northward_eulerian_current_velocity'
    }
    dimensions = {'lat': 'lat', 'lon': 'lon', 'time': 'time'}
    fieldset = FieldSet.from_netcdf(filenames,
                                    variables,
                                    dimensions,
                                    allow_time_extrapolation=True)

    output = {}

    for (loc_name, loc) in locations.items():
        lat_range = get_range(loc[0], second_largest_divisor(num_paths), 100.0)
        lon_range = get_range(loc[1],
                              num_paths // second_largest_divisor(num_paths),
                              100.0)
        lats, lons = np.meshgrid(lat_range, lon_range)

        pset = ParticleSet(fieldset=fieldset,
                           pclass=JITParticle,
                           lon=lons,
                           lat=lats)

        #pset.show()

        paths = [[] for i in range(num_paths)]
        for d in range(runtime // dt):
            pset.execute(
                AdvectionRK4,
                runtime=timedelta(days=dt),
                dt=timedelta(days=dt),
                recovery={ErrorCode.ErrorOutOfBounds: delete_particle})

            for (i, particle) in enumerate(pset.particles):
                paths[i].append(
                    (trunc_float(particle.lat), trunc_float(particle.lon)))

        output[loc_name] = paths

        pset.show(savefile=posixpath.join("../path_images", loc_name))

    with open(output_file, 'w') as out:
        out.write(json.dumps(output))
Example #26
def stommel_fieldset(xdim=200, ydim=200, grid_type='A'):
    """Simulate a periodic current along a western boundary, with significantly
    larger velocities along the western edge than the rest of the region

    The original test description can be found in: N. Fabbroni, 2009,
    Numerical Simulation of Passive tracers dispersion in the sea,
    Ph.D. dissertation, University of Bologna
    http://amsdottorato.unibo.it/1733/1/Fabbroni_Nicoletta_Tesi.pdf
    """
    a = b = 10000 * 1e3
    scalefac = 0.05  # to scale for physically meaningful velocities
    dx, dy = a / xdim, b / ydim

    # Coordinates of the test fieldset (on A-grid in deg)
    lon = np.linspace(0, a, xdim, dtype=np.float32)
    lat = np.linspace(0, b, ydim, dtype=np.float32)

    # Define arrays U (zonal), V (meridional) and P (sea surface height)
    U = np.zeros((lat.size, lon.size), dtype=np.float32)
    V = np.zeros((lat.size, lon.size), dtype=np.float32)
    P = np.zeros((lat.size, lon.size), dtype=np.float32)

    beta = 2e-11
    r = 1 / (11.6 * 86400)
    es = r / (beta * a)

    for j in range(lat.size):
        for i in range(lon.size):
            xi = lon[i] / a
            yi = lat[j] / b
            P[j, i] = (1 - math.exp(-xi / es) - xi) * math.pi * np.sin(
                math.pi * yi) * scalefac
            if grid_type == 'A':
                U[j, i] = -(1 - math.exp(-xi / es) - xi) * math.pi**2 * np.cos(
                    math.pi * yi) * scalefac
                V[j, i] = (math.exp(-xi / es) / es - 1) * math.pi * np.sin(
                    math.pi * yi) * scalefac
    if grid_type == 'C':
        V[:, 1:] = (P[:, 1:] - P[:, 0:-1]) / dx * a
        U[1:, :] = -(P[1:, :] - P[0:-1, :]) / dy * b

    data = {'U': U, 'V': V, 'P': P}
    dimensions = {'lon': lon, 'lat': lat}
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat')
    if grid_type == 'C':
        fieldset.U.interp_method = 'cgrid_velocity'
        fieldset.V.interp_method = 'cgrid_velocity'
    return fieldset
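A short sketch (assumed) comparing the two grid types returned by the function above; with 'cgrid_velocity' interpolation the C-grid fieldset should advect particles along essentially the same streamlines as the A-grid one.

# Sketch (assumed values): release the same particle on the A-grid and C-grid gyres.
for gtype in ['A', 'C']:
    fieldset = stommel_fieldset(grid_type=gtype)
    pset = ParticleSet(fieldset, pclass=JITParticle, lon=[1e6], lat=[5e6])
    pset.execute(AdvectionRK4, runtime=delta(days=10), dt=delta(hours=1))
    print(gtype, pset[0].lon, pset[0].lat)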
Example #27
def test_recursive_errorhandling(mode, xdim=2, ydim=2):
    """Example script to show how recursive error handling can work.

    In this example, a set of Particles is started at Longitude 0.5.
    These are run through a Kernel that throws an error if the
    Longitude is smaller than 0.7.
    The error Kernel then draws a new random number between 0 and 1.

    Importantly, the 'normal' Kernel and Error Kernel keep iterating
    until a particle does have a longitude larger than 0.7.

    This behaviour can be useful if particles need to be 'pushed out'
    from e.g. land. Note however that the current under-the-hood
    implementation is not very efficient, so the code could be slow."""

    dimensions = {'lon': np.linspace(0., 1., xdim, dtype=np.float32),
                  'lat': np.linspace(0., 1., ydim, dtype=np.float32)}
    data = {'U': np.zeros((ydim, xdim), dtype=np.float32),
            'V': np.zeros((ydim, xdim), dtype=np.float32)}
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat')

    # Set minimum value for valid longitudes (i.e. all longitudes < minlon are 'land')
    fieldset.add_constant('minlon', 0.7)

    # create a ParticleSet with all particles starting at centre of Field
    pset = ParticleSet.from_line(fieldset=fieldset, pclass=ptype[mode],
                                 start=(0.5, 0.5), finish=(0.5, 0.5), size=10)

    def TestLon(particle, fieldset, time):
        """Kernel to check whether a longitude is larger than fieldset.minlon.
        If not, the Kernel throws an error"""
        if particle.lon <= fieldset.minlon:
            return ErrorCode.Error

    def Error_RandomiseLon(particle, fieldset, time):
        """Error handling kernel that draws a new longitude.
        Note that this new longitude can be smaller than fieldset.minlon"""
        particle.lon = ParcelsRandom.uniform(0., 1.)

    ParcelsRandom.seed(123456)

    # The .execute below is only run for one timestep. Yet the
    # recovery={ErrorCode.Error: Error_RandomiseLon} assures Parcels keeps
    # attempting to move all particles beyond 0.7 longitude
    pset.execute(pset.Kernel(TestLon), runtime=1, dt=1,
                 recovery={ErrorCode.Error: Error_RandomiseLon})

    assert (pset.lon > fieldset.minlon).all()
Example #28
def test_add_duplicate_field(dupobject):
    data, dimensions = generate_fieldset(100, 100)
    fieldset = FieldSet.from_data(data, dimensions)
    field = Field('newfld', fieldset.U.data, lon=fieldset.U.lon, lat=fieldset.U.lat)
    fieldset.add_field(field)
    error_thrown = False
    try:
        if dupobject == 'same':
            fieldset.add_field(field)
        elif dupobject == 'new':
            field2 = Field('newfld', np.ones((2, 2)), lon=np.array([0, 1]), lat=np.array([0, 2]))
            fieldset.add_field(field2)
    except RuntimeError:
        error_thrown = True

    assert error_thrown
Example #29
def test_fieldset_constant(mode):
    data, dimensions = generate_fieldset(100, 100)
    fieldset = FieldSet.from_data(data, dimensions)
    westval = -0.2
    eastval = 0.3
    fieldset.add_constant('movewest', westval)
    fieldset.add_constant('moveeast', eastval)
    assert fieldset.movewest == westval

    pset = ParticleSet.from_line(fieldset,
                                 size=1,
                                 pclass=ptype[mode],
                                 start=(0.5, 0.5),
                                 finish=(0.5, 0.5))
    pset.execute(pset.Kernel(addConst), dt=1, runtime=1)
    assert abs(pset[0].lon - (0.5 + westval + eastval)) < 1e-4
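The addConst kernel executed above is defined elsewhere in the test module; a minimal sketch of what it presumably looks like (assumed), showing how fieldset constants are read as attributes inside a kernel:

def addConst(particle, fieldset, time):
    # Assumed kernel body: shift the particle longitude by the two fieldset constants.
    particle.lon = particle.lon + fieldset.movewest + fieldset.moveeast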
Example #30
def test_advection_3D_outofbounds(mode):
    xdim = ydim = zdim = 2
    dimensions = {'lon': np.linspace(0., 1, xdim, dtype=np.float32),
                  'lat': np.linspace(0., 1, ydim, dtype=np.float32),
                  'depth': np.linspace(0., 1, zdim, dtype=np.float32)}
    data = {'U': np.zeros((xdim, ydim, zdim), dtype=np.float32),
            'V': np.zeros((xdim, ydim, zdim), dtype=np.float32),
            'W': np.ones((xdim, ydim, zdim), dtype=np.float32)}
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat')

    def DeleteParticle(particle, fieldset, time):
        particle.delete()

    pset = ParticleSet(fieldset=fieldset, pclass=ptype[mode], lon=0.5, lat=0.5, depth=0.9)
    pset.execute(AdvectionRK4_3D, runtime=1., dt=1,
                 recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle})