def test_variable_written_once(fieldset, mode, tmpdir, npart):
    """Check that a Variable with ``to_write='once'`` is written a single time
    per particle and that the file keeps only its initial (per-particle) value,
    even though the kernel keeps incrementing it in memory."""
    filepath = tmpdir.join("pfile_once_written_variables.nc")

    def Update_v(particle, fieldset, time):
        # v_once is incremented every timestep but should only be *written* once
        particle.v_once += 1.
        particle.age += particle.dt

    class MyParticle(ptype[mode]):
        v_once = Variable('v_once', dtype=np.float64, initial=0., to_write='once')
        age = Variable('age', dtype=np.float32, initial=0.)

    lon = np.linspace(0, 1, npart)
    lat = np.linspace(1, 0, npart)
    # Stagger release times so each particle has a distinct v_once seed value
    time = np.arange(0, npart / 10., 0.1, dtype=np.float64)
    pset = ParticleSet(fieldset, pclass=MyParticle, lon=lon, lat=lat, time=time, v_once=time)
    ofile = pset.ParticleFile(name=filepath, outputdt=0.1)
    pset.execute(pset.Kernel(Update_v), endtime=1, dt=0.1, output_file=ofile)

    # In memory, v_once grew by one per executed timestep (age / dt == age * 10)
    assert np.allclose(pset.particle_data['v_once'] - time - pset.particle_data['age'] * 10, 0, atol=1e-5)
    ncfile = close_and_compare_netcdffiles(filepath, ofile)
    vfile = np.ma.filled(ncfile.variables['v_once'][:], np.nan)
    # On disk, exactly one value per particle: the initial seed (the release time)
    assert (vfile.shape == (npart, ))
    assert np.allclose(vfile, time)
    ncfile.close()
def test_variable_written_once(fieldset, mode, tmpdir, npart):
    """Check that a ``to_write='once'`` Variable stores only its initial value
    in the output file (older API variant: kernels take a ``dt`` argument and
    ``execute`` takes an ``interval``)."""
    filepath = tmpdir.join("pfile_once_written_variables")

    def Update_v(particle, fieldset, time, dt):
        particle.v_once += 1.

    class MyParticle(ptype[mode]):
        v_once = Variable('v_once', dtype=np.float32, initial=1., to_write='once')

    lon = np.linspace(0, 1, npart, dtype=np.float32)
    lat = np.linspace(1, 0, npart, dtype=np.float32)
    pset = ParticleSet(fieldset, pclass=MyParticle, lon=lon, lat=lat)
    pset.execute(pset.Kernel(Update_v), endtime=1, dt=0.1, interval=0.2,
                 output_file=pset.ParticleFile(name=filepath))
    ncfile = Dataset(filepath + ".nc", 'r', 'NETCDF4')
    V_once = ncfile.variables['v_once'][:]
    # 10 timesteps of dt=0.1 starting from initial value 1. -> 11. in memory
    assert np.all([p.v_once == 11.0 for p in pset])
    # ...but only one (initial) value per particle in the file
    assert (V_once.shape == (npart, ))
    assert (V_once[0] == 1.)
def test_meridionalflow_spherical(mode, xdim=100, ydim=200):
    """ Create uniform NORTHWARD flow on spherical earth and advect particles

    As flow is so simple, it can be directly compared to analytical solution
    """

    maxvel = 1.
    dimensions = {'lon': np.linspace(-180, 180, xdim, dtype=np.float32),
                  'lat': np.linspace(-90, 90, ydim, dtype=np.float32)}
    data = {'U': np.zeros([xdim, ydim]),
            'V': maxvel * np.ones([xdim, ydim])}

    fieldset = FieldSet.from_data(data, dimensions, mesh='spherical', transpose=True)

    lonstart = [0, 45]
    latstart = [0, 45]
    runtime = delta(hours=24)
    pset = ParticleSet(fieldset, pclass=pclass(mode), lon=lonstart, lat=latstart)
    pset.execute(pset.Kernel(AdvectionRK4), runtime=runtime, dt=delta(hours=1))

    # Expected northward displacement in degrees:
    # seconds * (m/s) / (1852 m per arc-minute * 60 minutes per degree)
    expected_dlat = runtime.total_seconds() * maxvel / 1852 / 60
    # BUG FIX: the original asserts were one-sided (`x - y < 1e-4`), so any
    # large *negative* deviation would still pass; use abs() for a real check.
    assert abs(pset.lat[0] - (latstart[0] + expected_dlat)) < 1e-4
    assert abs(pset.lon[0] - lonstart[0]) < 1e-4
    assert abs(pset.lat[1] - (latstart[1] + expected_dlat)) < 1e-4
    assert abs(pset.lon[1] - lonstart[1]) < 1e-4
def test_from_netcdf_memory_containment(mode, time_periodic, chunksize, with_GC):
    """Run a week-long advection on NetCDF-backed fields while logging process
    RSS at periodic callbacks, and assert that memory use stays bounded (i.e.
    dask-chunked field loading does not leak) for the given chunksize/GC setup."""
    if chunksize == 'auto':
        dask.config.set({'array.chunk-size': '2MiB'})
    else:
        dask.config.set({'array.chunk-size': '128MiB'})

    class PerformanceLog():
        # NOTE: class-level lists are shared between instances; fine here since
        # only a single PerformanceLog is created per test invocation.
        samples = []
        memory_steps = []
        _iter = 0

        def advance(self):
            # Record current resident-set size and the sample index
            process = psutil.Process(os.getpid())
            self.memory_steps.append(process.memory_info().rss)
            self.samples.append(self._iter)
            self._iter += 1

    def perIterGC():
        gc.collect()

    def periodicBoundaryConditions(particle, fieldset, time):
        # Wrap particle positions back into [-180, 180] x [-90, 90]
        while particle.lon > 180.:
            particle.lon -= 360.
        while particle.lon < -180.:
            particle.lon += 360.
        while particle.lat > 90.:
            particle.lat -= 180.
        while particle.lat < -90.:
            particle.lat += 180.

    process = psutil.Process(os.getpid())
    mem_0 = process.memory_info().rss
    fnameU = path.join(path.dirname(__file__), 'test_data', 'perlinfieldsU.nc')
    fnameV = path.join(path.dirname(__file__), 'test_data', 'perlinfieldsV.nc')
    # Reuse the same snapshot four times with explicit daily timestamps
    ufiles = [fnameU, ] * 4
    vfiles = [fnameV, ] * 4
    timestamps = np.arange(0, 4, 1) * 86400.0
    timestamps = np.expand_dims(timestamps, 1)
    files = {'U': ufiles, 'V': vfiles}
    variables = {'U': 'vozocrtx', 'V': 'vomecrty'}
    dimensions = {'lon': 'nav_lon', 'lat': 'nav_lat'}

    fieldset = FieldSet.from_netcdf(files, variables, dimensions, timestamps=timestamps,
                                    time_periodic=time_periodic,
                                    allow_time_extrapolation=True if time_periodic in [False, None] else False,
                                    chunksize=chunksize)
    perflog = PerformanceLog()
    postProcessFuncs = [perflog.advance, ]
    if with_GC:
        postProcessFuncs.append(perIterGC)
    pset = ParticleSet(fieldset=fieldset, pclass=ptype[mode], lon=[0.5, ], lat=[0.5, ])
    # Baseline memory is taken *after* ParticleSet construction
    mem_0 = process.memory_info().rss
    mem_exhausted = False
    try:
        pset.execute(pset.Kernel(AdvectionRK4)+periodicBoundaryConditions, dt=delta(hours=1),
                     runtime=delta(days=7), postIterationCallbacks=postProcessFuncs,
                     callbackdt=delta(hours=12))
    except MemoryError:
        mem_exhausted = True
    mem_steps_np = np.array(perflog.memory_steps)
    if with_GC:
        # After warm-up (first 8 samples) memory should be flat within 1%
        assert np.allclose(mem_steps_np[8:], perflog.memory_steps[-1], rtol=0.01)
    if (chunksize is not False or with_GC) and mode != 'scipy':
        # NOTE(review): np.alltrue is deprecated in newer NumPy in favour of np.all
        assert np.alltrue((mem_steps_np-mem_0) < 4712832)  # represents 4 x [U|V] * sizeof(field data)
    assert not mem_exhausted
def run_corefootprintparticles(outfile):
    """Release foraminifera 'core footprint' particles above a sediment-core
    location and advect them backwards in time (negative dt) while sampling
    temperature, applying sinking, and tracking particle age.

    Results are written to ``outfile`` once per day.
    """
    snapshots = range(3165, 3288)
    fieldset = set_ofes_fieldset(snapshots)
    # Model constants: dwelling depth (m), sink speed (m/s), maximum age (s)
    fieldset.add_constant('dwellingdepth', 50.)
    fieldset.add_constant('sinkspeed', 200. / 86400)
    fieldset.add_constant('maxage', 30. * 86400)
    corelon = [17.30]
    corelat = [-34.70]
    coredepth = [2440]

    class ForamParticle(JITParticle):
        temp = Variable('temp', dtype=np.float32, initial=fieldset.temp)
        age = Variable('age', dtype=np.float32, initial=0.)

    # Start at the last available field time and release a new particle every 3 days
    pset = ParticleSet(fieldset=fieldset, pclass=ForamParticle, lon=corelon, lat=corelat,
                       depth=coredepth, time=fieldset.U.grid.time[-1],
                       repeatdt=delta(days=3))  # the new argument 'repeatdt' means no need to call pset.add() anymore in for-loop
    pfile = ParticleFile(outfile, pset, outputdt=delta(days=1))  # `interval` argument has changed to `outputdt`
    kernels = pset.Kernel(AdvectionRK4_3D) + Sink + SampleTemp + Age
    # Negative dt: integrate backwards in time from the last snapshot
    pset.execute(kernels, dt=delta(minutes=-5), output_file=pfile,
                 recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle})
def test_add_second_vector_field(mode):
    """Add a second VectorField (UV2), defined on a slightly shifted grid, to an
    existing FieldSet and verify a kernel can sample and advect with it."""
    lon = np.linspace(0., 10., 12, dtype=np.float32)
    lat = np.linspace(0., 10., 10, dtype=np.float32)
    U = np.ones((10, 12), dtype=np.float32)
    V = np.zeros((10, 12), dtype=np.float32)
    data = {'U': U, 'V': V}
    dimensions = {'U': {'lat': lat, 'lon': lon}, 'V': {'lat': lat, 'lon': lon}}
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat')

    # Second (U2, V2) pair lives on a grid offset by 0.1 degree
    data2 = {'U2': U, 'V2': V}
    dimensions2 = {'lon': [ln + 0.1 for ln in lon], 'lat': [lt - 0.1 for lt in lat]}
    fieldset2 = FieldSet.from_data(data2, dimensions2, mesh='flat')
    UV2 = VectorField('UV2', fieldset2.U2, fieldset2.V2)
    fieldset.add_vector_field(UV2)

    def SampleUV2(particle, fieldset, time):
        u, v = fieldset.UV2[time, particle.depth, particle.lat, particle.lon]
        particle.lon += u * particle.dt
        particle.lat += v * particle.dt

    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=0.5, lat=0.5)
    pset.execute(AdvectionRK4+pset.Kernel(SampleUV2), dt=1, runtime=1)

    # u == 1 from both RK4 advection and the UV2 sampling kernel -> lon += 2; v == 0
    assert abs(pset.lon[0] - 2.5) < 1e-9
    assert abs(pset.lat[0] - .5) < 1e-9
def test_globcurrent_particle_independence(mode, rundays=5):
    """Check that deleting one particle through the recovery mechanism does not
    perturb the trajectory of the remaining particles."""
    fieldset = set_globcurrent_fieldset()
    time0 = fieldset.U.grid.time[0]

    def DeleteP0(particle, fieldset, time):
        # Force particle 0 into the recovery loop so it gets deleted
        if particle.id == 0:
            return ErrorCode.ErrorOutOfBounds  # we want to pass through recov loop

    def DeleteParticle(particle, fieldset, time):
        particle.delete()

    pset0 = ParticleSet(fieldset, pclass=JITParticle,
                        lon=[25, 25], lat=[-35, -35], time=time0)
    pset0.execute(pset0.Kernel(DeleteP0) + AdvectionRK4,
                  runtime=delta(days=rundays), dt=delta(minutes=5),
                  recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle})

    # Reference run without any deletion
    pset1 = ParticleSet(fieldset, pclass=JITParticle,
                        lon=[25, 25], lat=[-35, -35], time=time0)
    pset1.execute(AdvectionRK4, runtime=delta(days=rundays), dt=delta(minutes=5))

    # The surviving particle of pset0 must end where the same particle of pset1 ends
    assert np.allclose([pset0[-1].lon, pset0[-1].lat], [pset1[-1].lon, pset1[-1].lat])
def test_variable_written_once(fieldset, mode, tmpdir, npart):
    """Check ``to_write='once'`` Variables with a repeat-release ParticleSet:
    every released particle gets exactly one entry in the file, holding the
    initial value (0), regardless of later kernel increments."""
    filepath = tmpdir.join("pfile_once_written_variables")

    def Update_v(particle, fieldset, time, dt):
        particle.v_once += 1.
        particle.age += dt

    class MyParticle(ptype[mode]):
        v_once = Variable('v_once', dtype=np.float32, initial=0., to_write='once')
        age = Variable('age', dtype=np.float32, initial=0.)

    lon = np.linspace(0, 1, npart, dtype=np.float32)
    lat = np.linspace(1, 0, npart, dtype=np.float32)
    # repeatdt=0.1 releases a new batch of npart particles every 0.1 time units
    pset = ParticleSet(fieldset, pclass=MyParticle, lon=lon, lat=lat, repeatdt=0.1)
    pset.execute(pset.Kernel(Update_v), endtime=1, dt=0.1,
                 output_file=pset.ParticleFile(name=filepath, outputdt=0.1))

    # In memory, v_once grew by one per executed timestep (age / dt == age * 10)
    assert np.allclose([p.v_once - p.age * 10 for p in pset], 0, atol=1e-5)
    ncfile = Dataset(filepath + ".nc", 'r', 'NETCDF4')
    vfile = ncfile.variables['v_once'][:]
    # 11 release events (t = 0, 0.1, ..., 1.0) of npart particles each
    assert (vfile.shape == (npart * 11, ))
    # BUG FIX: the original `assert [v == 0 for v in vfile]` asserted a
    # non-empty list, which is always truthy (a vacuous check). Actually
    # verify that every written value equals the initial value 0.
    assert np.all(vfile == 0)
    ncfile.close()
def test_meridionalflow_sperical(mode, xdim=100, ydim=200):
    """ Create uniform NORTHWARD flow on spherical earth and advect particles

    As flow is so simple, it can be directly compared to analytical solution
    """

    maxvel = 1.
    lon = np.linspace(-180, 180, xdim, dtype=np.float32)
    lat = np.linspace(-90, 90, ydim, dtype=np.float32)
    U = np.zeros([xdim, ydim])
    V = maxvel * np.ones([xdim, ydim])

    grid = Grid.from_data(np.array(U, dtype=np.float32), lon, lat,
                          np.array(V, dtype=np.float32), lon, lat)

    lonstart = [0, 45]
    latstart = [0, 45]
    endtime = delta(hours=24)
    pset = ParticleSet(grid, pclass=pclass(mode), lon=lonstart, lat=latstart)
    pset.execute(pset.Kernel(AdvectionRK4), endtime=endtime, dt=delta(hours=1))

    # Expected northward displacement in degrees:
    # seconds * (m/s) / (1852 m per arc-minute * 60 minutes per degree)
    expected_dlat = endtime.total_seconds() * maxvel / 1852 / 60
    # BUG FIX: the original asserts were one-sided (`x - y < 1e-4`), so any
    # large *negative* deviation would still pass; use abs() for a real check.
    assert abs(pset[0].lat - (latstart[0] + expected_dlat)) < 1e-4
    assert abs(pset[0].lon - lonstart[0]) < 1e-4
    assert abs(pset[1].lat - (latstart[1] + expected_dlat)) < 1e-4
    assert abs(pset[1].lon - lonstart[1]) < 1e-4
def test_random_kernel_concat(fieldset, mode, concat):
    """Check that a kernel using ParcelsRandom can be concatenated with another
    kernel, and that the concatenated chain applies both contributions."""
    class TestParticle(ptype[mode]):
        p = Variable('p', dtype=np.float32)

    pset = ParticleSet(fieldset, pclass=TestParticle, lon=0, lat=0)

    def RandomKernel(particle, fieldset, time):
        particle.p += ParcelsRandom.uniform(0, 1)

    def AddOne(particle, fieldset, time):
        particle.p += 1.

    kernels = pset.Kernel(RandomKernel) + pset.Kernel(AddOne) if concat else RandomKernel
    pset.execute(kernels, dt=0)
    # With AddOne concatenated p exceeds 1; a single uniform(0, 1) draw stays below 1
    assert pset.p > 1 if concat else pset.p < 1
def test_advection_periodic_meridional(mode, xdim=100, ydim=100):
    """Advect a particle through a meridionally periodic domain using a halo."""
    fset = periodicfields(xdim, ydim, uvel=0., vvel=1.)
    fset.add_periodic_halo(meridional=True)
    # The default halo width is 5 grid points on each side
    assert len(fset.U.lat) == ydim + 10

    particles = ParticleSet(fset, pclass=ptype[mode], lon=[0.5], lat=[0.5])
    kernel = AdvectionRK4 + particles.Kernel(periodicBC)
    particles.execute(kernel, runtime=delta(hours=20), dt=delta(seconds=30))
    assert abs(particles[0].lat - 0.15) < 0.1
def test_advection_periodic_zonal(mode, xdim=100, ydim=100, halosize=3):
    """Advect a particle through a zonally periodic domain with a custom halo size."""
    fset = periodicfields(xdim, ydim, uvel=1., vvel=0.)
    fset.add_periodic_halo(zonal=True, halosize=halosize)
    # The halo adds `halosize` grid points on each zonal side
    assert len(fset.U.lon) == xdim + 2 * halosize

    particles = ParticleSet(fset, pclass=ptype[mode], lon=[0.5], lat=[0.5])
    kernel = AdvectionRK4 + particles.Kernel(periodicBC)
    particles.execute(kernel, runtime=delta(hours=20), dt=delta(seconds=30))
    assert abs(particles[0].lon - 0.15) < 0.1
def test_multi_kernel_reuse_varnames(fieldset, mode):
    """A local variable declared in the first kernel of a merged chain should
    remain usable in the second kernel."""
    # Testing for merging of two Kernels with the same variable declared
    # Should throw a warning, but go ahead regardless
    def MoveEast1(particle, fieldset, time, dt):
        add_lon = 0.2
        particle.lon += add_lon

    def MoveEast2(particle, fieldset, time, dt):
        particle.lon += add_lon  # NOQA - no flake8 testing of this line

    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=[0.5], lat=[0.5])
    pset.execute(pset.Kernel(MoveEast1) + pset.Kernel(MoveEast2),
                 starttime=0., endtime=1., dt=1.)
    assert np.allclose([p.lon for p in pset], [0.9], rtol=1e-5)  # should be 0.5 + 0.2 + 0.2 = 0.9
def test_multi_kernel_duplicate_varnames(fieldset, mode):
    """Two merged kernels may each declare a local with the same name; each
    kernel's own value must be used."""
    # Testing for merging of two Kernels with the same variable declared
    # Should throw a warning, but go ahead regardless
    def MoveEast(particle, fieldset, time, dt):
        add_lon = 0.1
        particle.lon += add_lon

    def MoveWest(particle, fieldset, time, dt):
        add_lon = -0.3
        particle.lon += add_lon

    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=[0.5], lat=[0.5])
    pset.execute(pset.Kernel(MoveEast) + pset.Kernel(MoveWest),
                 starttime=0., endtime=1., dt=1.)
    # 0.5 + 0.1 - 0.3 = 0.3
    assert np.allclose([p.lon for p in pset], 0.3, rtol=1e-5)
def test_summedfields(mode, with_W, k_sample_p, mesh):
    """Check SummedField behaviour: fields defined on grids of different
    resolution can be added (U1+U2, P1+P2+..., W1+W2), sampled, and used for
    advection, with values equal to the sum of the components."""
    xdim = 10
    ydim = 20
    zdim = 4
    gf = 10  # factor by which the resolution of grid1 is higher than of grid2
    U1 = Field('U', 0.2*np.ones((zdim*gf, ydim*gf, xdim*gf), dtype=np.float32),
               lon=np.linspace(0., 1., xdim*gf, dtype=np.float32),
               lat=np.linspace(0., 1., ydim*gf, dtype=np.float32),
               depth=np.linspace(0., 20., zdim*gf, dtype=np.float32),
               mesh=mesh)
    U2 = Field('U', 0.1*np.ones((zdim, ydim, xdim), dtype=np.float32),
               lon=np.linspace(0., 1., xdim, dtype=np.float32),
               lat=np.linspace(0., 1., ydim, dtype=np.float32),
               depth=np.linspace(0., 20., zdim, dtype=np.float32),
               mesh=mesh)
    V1 = Field('V', np.zeros((zdim*gf, ydim*gf, xdim*gf), dtype=np.float32), grid=U1.grid, fieldtype='V')
    V2 = Field('V', np.zeros((zdim, ydim, xdim), dtype=np.float32), grid=U2.grid, fieldtype='V')
    fieldsetS = FieldSet(U1+U2, V1+V2)

    # Degrees->metres conversion factor for a spherical mesh
    conv = 1852*60 if mesh == 'spherical' else 1.
    # Summed velocity is 0.2 + 0.1
    assert np.allclose(fieldsetS.U[0, 0, 0, 0]*conv, 0.3)

    P1 = Field('P', 30*np.ones((zdim*gf, ydim*gf, xdim*gf), dtype=np.float32), grid=U1.grid)
    P2 = Field('P', 20*np.ones((zdim, ydim, xdim), dtype=np.float32), grid=U2.grid)
    P3 = Field('P', 10*np.ones((zdim, ydim, xdim), dtype=np.float32), grid=U2.grid)
    P4 = Field('P', 0*np.ones((zdim, ydim, xdim), dtype=np.float32), grid=U2.grid)
    # Nested sums of sums must also work: (30+0)+(20+10) = 60
    fieldsetS.add_field((P1+P4)+(P2+P3), name='P')
    assert np.allclose(fieldsetS.P[0, 0, 0, 0], 60)

    if with_W:
        # Add a summed vertical velocity and advect in 3D
        W1 = Field('W', 2*np.ones((zdim * gf, ydim * gf, xdim * gf), dtype=np.float32), grid=U1.grid)
        W2 = Field('W', np.ones((zdim, ydim, xdim), dtype=np.float32), grid=U2.grid)
        fieldsetS.add_field(W1+W2, name='W')
        pset = ParticleSet(fieldsetS, pclass=pclass(mode), lon=[0], lat=[0.9])
        pset.execute(AdvectionRK4_3D+pset.Kernel(k_sample_p), runtime=2, dt=1)
        # w = 2 + 1 = 3 m/s for 2 seconds -> depth 6
        assert np.isclose(pset[0].depth, 6)
    else:
        pset = ParticleSet(fieldsetS, pclass=pclass(mode), lon=[0], lat=[0.9])
        pset.execute(AdvectionRK4+pset.Kernel(k_sample_p), runtime=2, dt=1)
    assert np.isclose(pset[0].p, 60)
    # u = 0.3 (converted) for 2 seconds -> 0.6 in mesh units
    assert np.isclose(pset[0].lon*conv, 0.6, atol=1e-3)
    assert np.isclose(pset[0].lat, 0.9)
    assert np.allclose(fieldsetS.UV[0][0, 0, 0, 0], [.2/conv, 0])
def test_pset_add_execute(fieldset, mode, npart=10):
    """Particles added one-by-one via ``pset +=`` must all be advanced by execute()."""
    def AddLat(particle, fieldset, time, dt):
        particle.lat += 0.1

    pset = ParticleSet(fieldset, lon=[], lat=[], pclass=ptype[mode])
    for i in range(npart):
        pset += ptype[mode](lon=0.1, lat=0.1, fieldset=fieldset)
    for _ in range(3):
        pset.execute(pset.Kernel(AddLat), runtime=1., dt=1.0)
    # 0.1 initial + 3 executions x 0.1 = 0.4
    assert np.allclose(np.array([p.lat for p in pset]), 0.4, rtol=1e-12)
def test_pset_remove_kernel(fieldset, mode, npart=100):
    """Particles can delete themselves from inside a kernel."""
    def DeleteKernel(particle, fieldset, time):
        if particle.lon >= .4:
            particle.delete()

    pset = ParticleSet(fieldset, pclass=ptype[mode],
                       lon=np.linspace(0, 1, npart, dtype=np.float32),
                       lat=np.linspace(1, 0, npart, dtype=np.float32))
    pset.execute(pset.Kernel(DeleteKernel), endtime=1., dt=1.0)
    # lon >= 0.4 holds for 60 of the 100 linspace values, leaving 40 particles
    assert(pset.size == 40)
def test_grid_sample_geographic(grid_geometric, mode, k_sample_uv, npart=120):
    """ Sample a grid with conversion to geographic units (degrees). """
    grid = grid_geometric
    lon = np.linspace(-170, 170, npart, dtype=np.float32)
    lat = np.linspace(-80, 80, npart, dtype=np.float32)

    # Sample along a line of constant latitude (70N): v should reproduce lon
    pset = ParticleSet(grid, pclass=pclass(mode), lon=lon,
                       lat=np.zeros(npart, dtype=np.float32) + 70.)
    pset.execute(pset.Kernel(k_sample_uv), endtime=1., dt=1.)
    assert np.allclose(np.array([p.v for p in pset]), lon, rtol=1e-6)

    # Sample along a line of constant longitude (45W): u should reproduce lat
    pset = ParticleSet(grid, pclass=pclass(mode), lat=lat,
                       lon=np.zeros(npart, dtype=np.float32) - 45.)
    pset.execute(pset.Kernel(k_sample_uv), endtime=1., dt=1.)
    assert np.allclose(np.array([p.u for p in pset]), lat, rtol=1e-6)
def test_pset_add_execute(grid, mode, npart=10):
    """Particles added one-by-one via ``pset +=`` must all be advanced by
    execute() (older Grid-based API with starttime argument)."""
    def AddLat(particle, grid, time, dt):
        particle.lat += 0.1

    pset = ParticleSet(grid, lon=[], lat=[], pclass=ptype[mode])
    for i in range(npart):
        pset += ptype[mode](lon=0.1, lat=0.1, grid=grid)
    for _ in range(3):
        pset.execute(pset.Kernel(AddLat), starttime=0., endtime=1., dt=1.0)
    # 0.1 initial + 3 executions x 0.1 = 0.4
    assert np.allclose(np.array([p.lat for p in pset]), 0.4, rtol=1e-12)
def test_globcurrent_startparticles_between_time_arrays(mode, dt, with_starttime):
    """Add an extra field (P) that covers only part of the velocity fields'
    time range; explicitly starting particles at the edge of the velocity
    grid must raise TimeExtrapolationError when sampling P, while the default
    start time must run without error."""
    fieldset = set_globcurrent_fieldset()

    # P only exists for February 2002 files
    fnamesFeb = sorted(glob(path.join(path.dirname(__file__),
                                      'GlobCurrent_example_data', '200202*.nc')))
    fieldset.add_field(Field.from_netcdf(fnamesFeb,
                                         ('P', 'eastward_eulerian_current_velocity'),
                                         {'lat': 'lat', 'lon': 'lon', 'time': 'time'}))

    class MyParticle(ptype[mode]):
        sample_var = Variable('sample_var', initial=0.)

    def SampleP(particle, fieldset, time):
        particle.sample_var += fieldset.P[time, particle.depth, particle.lat, particle.lon]

    if with_starttime:
        # First grid time for forward runs, last grid time for backward runs —
        # both lie outside the time span of the P field
        time = fieldset.U.grid.time[0] if dt > 0 else fieldset.U.grid.time[-1]
        pset = ParticleSet(fieldset, pclass=MyParticle, lon=[25], lat=[-35], time=time)
    else:
        pset = ParticleSet(fieldset, pclass=MyParticle, lon=[25], lat=[-35])

    if with_starttime:
        with pytest.raises(TimeExtrapolationError):
            pset.execute(pset.Kernel(AdvectionRK4) + SampleP, runtime=delta(days=1), dt=dt)
    else:
        pset.execute(pset.Kernel(AdvectionRK4) + SampleP, runtime=delta(days=1), dt=dt)
def test_pset_execute_dt_0(fieldset, mode, endtime, dt, npart=2):
    """With zero dt/endtime the kernel is still applied (lat is set) but the
    particles do not move and their time stays at 0."""
    def SetLat(particle, fieldset, time, dt):
        particle.lat = .6

    lon = np.linspace(0, 1, npart, dtype=np.float32)
    lat = np.linspace(1, 0, npart, dtype=np.float32)

    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=lon, lat=lat)
    pset.execute(pset.Kernel(SetLat), starttime=0., endtime=endtime, dt=dt)
    # Longitudes unchanged, latitudes overwritten by the kernel, time not advanced
    assert np.allclose([p.lon for p in pset], lon)
    assert np.allclose([p.lat for p in pset], .6)
    assert np.allclose([p.time for p in pset], 0)
def test_pset_multi_execute(fieldset, mode, npart=10, n=5):
    """Repeated execute() calls with the same Kernel object accumulate their effect."""
    def AddLat(particle, fieldset, time):
        particle.lat += 0.1

    pset = ParticleSet(fieldset, pclass=ptype[mode],
                       lon=np.linspace(0, 1, npart, dtype=np.float32),
                       lat=np.zeros(npart, dtype=np.float32))
    k_add = pset.Kernel(AddLat)
    for _ in range(n):
        pset.execute(k_add, runtime=1., dt=1.0)
    # n executions of +0.1 each
    assert np.allclose([p.lat - n*0.1 for p in pset], np.zeros(npart), rtol=1e-12)
def test_mitgridindexing(mode, gridindexingtype):
    """Solid-body rotation on a staggered C-grid: with the correct staggering
    sign for the chosen indexing convention ('nemo' vs 'mitgcm'), a particle's
    distance from the rotation centre is conserved during advection."""
    xdim, ydim = 151, 201
    a = b = 20000  # domain size
    lon = np.linspace(-a / 2, a / 2, xdim, dtype=np.float32)
    lat = np.linspace(-b / 2, b / 2, ydim, dtype=np.float32)
    dx, dy = lon[2] - lon[1], lat[2] - lat[1]
    # One full rotation per day
    omega = 2 * np.pi / delta(days=1).total_seconds()

    # NEMO and MITgcm stagger velocity points on opposite sides of the cell
    index_signs = {'nemo': -1, 'mitgcm': 1}
    isign = index_signs[gridindexingtype]

    def calc_r_phi(ln, lt):
        # Polar coordinates (radius, angle) of a grid point
        return np.sqrt(ln**2 + lt**2), np.arctan2(ln, lt)

    def calculate_UVR(lat, lon, dx, dy, omega):
        # Build solid-body-rotation velocities on staggered (C-grid) locations
        # and the radius field R on cell centres
        U = np.zeros((lat.size, lon.size), dtype=np.float32)
        V = np.zeros((lat.size, lon.size), dtype=np.float32)
        R = np.zeros((lat.size, lon.size), dtype=np.float32)
        for i in range(lon.size):
            for j in range(lat.size):
                r, phi = calc_r_phi(lon[i], lat[j])
                R[j, i] = r
                # V lives half a cell away in x; offset sign depends on convention
                r, phi = calc_r_phi(lon[i] + isign * dx / 2, lat[j])
                V[j, i] = -omega * r * np.sin(phi)
                # U lives half a cell away in y
                r, phi = calc_r_phi(lon[i], lat[j] + isign * dy / 2)
                U[j, i] = omega * r * np.cos(phi)
        return U, V, R

    U, V, R = calculate_UVR(lat, lon, dx, dy, omega)
    data = {'U': U, 'V': V, 'R': R}
    dimensions = {'lon': lon, 'lat': lat}
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat', gridindexingtype=gridindexingtype)
    fieldset.U.interp_method = 'cgrid_velocity'
    fieldset.V.interp_method = 'cgrid_velocity'

    def UpdateR(particle, fieldset, time):
        particle.radius = fieldset.R[time, particle.depth, particle.lat, particle.lon]

    class MyParticle(ptype[mode]):
        radius = Variable('radius', dtype=np.float32, initial=0.)
        radius_start = Variable('radius_start', dtype=np.float32, initial=fieldset.R)

    pset = ParticleSet(fieldset, pclass=MyParticle, lon=0, lat=4e3, time=0)
    pset.execute(pset.Kernel(UpdateR) + AdvectionRK4,
                 runtime=delta(hours=14), dt=delta(minutes=5))
    # Radius should be conserved (within 10 m on a 4 km orbit)
    assert np.allclose(pset.radius, pset.radius_start, atol=10)
def test_variable_write_double(fieldset, mode, tmpdir):
    """With ``lonlatdepth_dtype=np.float64``, particle positions must be written
    to the output file as float64."""
    filepath = tmpdir.join("pfile_variable_write_double")

    def Update_lon(particle, fieldset, time):
        particle.lon += 0.1

    pset = ParticleSet(fieldset, pclass=JITParticle, lon=[0], lat=[0],
                       lonlatdepth_dtype=np.float64)
    pset.execute(pset.Kernel(Update_lon), endtime=1, dt=0.1,
                 output_file=pset.ParticleFile(name=filepath, outputdt=0.1))

    ncfile = Dataset(filepath+".nc", 'r', 'NETCDF4')
    lons = ncfile.variables['lon'][:]
    assert (isinstance(lons[0, 0], np.float64))
def test_advection_periodic_zonal_meridional(mode, xdim=100, ydim=100):
    """Advect a particle through a domain that is periodic in both directions."""
    fset = periodicfields(xdim, ydim, uvel=1., vvel=1.)
    fset.add_periodic_halo(zonal=True, meridional=True)
    # The default halo width is 5 grid points on each side
    assert len(fset.U.lat) == ydim + 10
    assert len(fset.U.lon) == xdim + 10
    # The halo must preserve the uniform grid spacing in both directions
    dlat = fset.U.lat[1] - fset.U.lat[0]
    dlon = fset.U.lon[1] - fset.U.lon[0]
    assert np.allclose(np.diff(fset.U.lat), dlat, rtol=0.001)
    assert np.allclose(np.diff(fset.U.lon), dlon, rtol=0.001)

    particles = ParticleSet(fset, pclass=ptype[mode], lon=[0.4], lat=[0.5])
    kernel = AdvectionRK4 + particles.Kernel(periodicBC)
    particles.execute(kernel, runtime=delta(hours=20), dt=delta(seconds=30))
    assert abs(particles[0].lon - 0.05) < 0.1
    assert abs(particles[0].lat - 0.15) < 0.1
def test_execution_keep_cfiles_and_nocompilation_warnings(fieldset, delete_cfiles):
    """Check that the generated C source file is kept or removed according to
    `delete_cfiles`, and that JIT compilation produced no warnings."""
    particles = ParticleSet(fieldset, pclass=JITParticle, lon=[0.], lat=[0.])
    kernel = particles.Kernel(AdvectionRK4, delete_cfiles=delete_cfiles)
    particles.execute(kernel, endtime=1., dt=1.)

    src_path = particles.kernel.src_file
    log_path = particles.kernel.log_file
    # Deleting the kernel triggers its cleanup, which removes the C file when requested
    del particles.kernel

    if not delete_cfiles:
        assert path.exists(src_path)
        with open(log_path) as log:
            assert 'warning' not in log.read(), 'Compilation WARNING in log file'
    else:
        assert not path.exists(src_path)
def test_c_kernel(fieldset, mode, c_inc):
    """Check kernels that call a custom C function, supplied either as an
    inline C source string or via a header file (`customed_header.h`);
    scipy mode uses the equivalent pure-Python function instead."""
    coord_type = np.float32 if c_inc == 'str' else np.float64
    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=[0.5], lat=[0],
                       lonlatdepth_dtype=coord_type)

    def func(U, lon, dt):
        # Python equivalent of the C helper below (used in scipy mode)
        u = U.data[0, 2, 1]
        return lon + u * dt

    if c_inc == 'str':
        c_include = """
                 static inline StatusCode func(CField *f, float *lon, double *dt)
                 {
                   float data2D[2][2][2];
                   StatusCode status = getCell2D(f, 1, 2, 0, data2D, 1); CHECKSTATUS(status);
                   float u = data2D[0][0][0];
                   *lon += u * *dt;
                   return SUCCESS;
                 }
                 """
    else:
        c_include = path.join(path.dirname(__file__), 'customed_header.h')

    def ckernel(particle, fieldset, time):
        # The string names the C function to call with pointer arguments
        func('parcels_customed_Cfunc_pointer_args', fieldset.U, particle.lon, particle.dt)

    def pykernel(particle, fieldset, time):
        particle.lon = func(fieldset.U, particle.lon, particle.dt)

    if mode == 'scipy':
        kernel = pset.Kernel(pykernel)
    else:
        kernel = pset.Kernel(ckernel, c_include=c_include)
    pset.execute(kernel, endtime=3., dt=3.)
    assert np.allclose(pset.lon[0], 0.81578948)
def test_fieldset_sample_geographic(fieldset_geometric, mode, k_sample_uv, npart=120):
    """ Sample a fieldset with conversion to geographic units (degrees). """
    fset = fieldset_geometric
    lons = np.linspace(-170, 170, npart)
    lats = np.linspace(-80, 80, npart)

    # Constant latitude (70N): the sampled v must reproduce the longitudes
    particles = ParticleSet(fset, pclass=pclass(mode), lon=lons, lat=np.full(npart, 70.))
    particles.execute(particles.Kernel(k_sample_uv), endtime=1., dt=1.)
    assert np.allclose(particles.v, lons, rtol=1e-6)

    # Constant longitude (45W): the sampled u must reproduce the latitudes
    particles = ParticleSet(fset, pclass=pclass(mode), lat=lats, lon=np.full(npart, -45.))
    particles.execute(particles.Kernel(k_sample_uv), endtime=1., dt=1.)
    assert np.allclose(particles.u, lats, rtol=1e-6)
def test_fieldset_sample_particle(fieldset, mode, k_sample_uv, npart=120):
    """ Sample the fieldset using an array of particles.

    Note that the low tolerances (1.e-6) are due to the first-order
    interpolation in JIT mode and give an indication of the corresponding
    sampling error.
    """
    lons = np.linspace(-170, 170, npart, dtype=np.float32)
    lats = np.linspace(-80, 80, npart, dtype=np.float32)

    # Constant latitude (70N): the sampled v must reproduce the longitudes
    particles = ParticleSet(fieldset, pclass=pclass(mode), lon=lons,
                            lat=np.full(npart, 70., dtype=np.float32))
    particles.execute(particles.Kernel(k_sample_uv), endtime=1., dt=1.)
    assert np.allclose(np.array([p.v for p in particles]), lons, rtol=1e-6)

    # Constant longitude (45W): the sampled u must reproduce the latitudes
    particles = ParticleSet(fieldset, pclass=pclass(mode), lat=lats,
                            lon=np.full(npart, -45., dtype=np.float32))
    particles.execute(particles.Kernel(k_sample_uv), endtime=1., dt=1.)
    assert np.allclose(np.array([p.u for p in particles]), lats, rtol=1e-6)
def test_errorcode_repeat(fieldset, mode):
    """A kernel returning ErrorCode.Repeat must be re-run with the particle
    state reset, until it eventually returns ErrorCode.Success."""
    def simpleKernel(particle, fieldset, time):
        if particle.lon > .1 and time < 1.:
            # if particle.lon is not re-setted before kernel repetition, it will break here
            return ErrorCode.Error
        particle.lon += 0.1
        if particle.dt > 1.49:
            # dt is used to leave the repetition loop (dt is the only variable not re-setted)
            return ErrorCode.Success
        particle.dt += .1
        return ErrorCode.Repeat

    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=[0.], lat=[0.])
    pset.execute(pset.Kernel(simpleKernel), endtime=3., dt=1.)