def test_fieldset_defer_loading_with_diff_time_origin(tmpdir, fail, filename='test_parcels_defer_loading'):
    filepath = tmpdir.join(filename)
    data0, dims0 = generate_fieldset(10, 10, 1, 10)
    dims0['time'] = np.arange(0, 10, 1) * 3600
    fieldset_out = FieldSet.from_data(data0, dims0)
    fieldset_out.U.grid.time_origin = TimeConverter(np.datetime64('2018-04-20'))
    fieldset_out.V.grid.time_origin = TimeConverter(np.datetime64('2018-04-20'))
    data1, dims1 = generate_fieldset(10, 10, 1, 10)
    if fail:
        dims1['time'] = np.arange(0, 10, 1) * 3600
    else:
        dims1['time'] = np.arange(0, 10, 1) * 1800 + (24 + 25) * 3600
    if fail:
        Wtime_origin = TimeConverter(np.datetime64('2018-04-22'))
    else:
        Wtime_origin = TimeConverter(np.datetime64('2018-04-18'))
    gridW = RectilinearZGrid(dims1['lon'], dims1['lat'], dims1['depth'], dims1['time'],
                             time_origin=Wtime_origin)
    fieldW = Field('W', np.zeros(data1['U'].shape), grid=gridW)
    fieldset_out.add_field(fieldW)
    fieldset_out.write(filepath)
    fieldset = FieldSet.from_parcels(filepath, extra_fields={'W': 'W'})
    assert fieldset.U.creation_log == 'from_parcels'
    pset = ParticleSet.from_list(fieldset, pclass=JITParticle, lon=[0.5], lat=[0.5], depth=[0.5],
                                 time=[datetime.datetime(2018, 4, 20, 1)])
    pset.execute(AdvectionRK4_3D, runtime=delta(hours=4), dt=delta(hours=1))
def test_curvilinear_grids(mode):
    x = np.linspace(0, 1e3, 7, dtype=np.float32)
    y = np.linspace(0, 1e3, 5, dtype=np.float32)
    (xx, yy) = np.meshgrid(x, y)
    r = np.sqrt(xx * xx + yy * yy)
    theta = np.arctan2(yy, xx)
    theta = theta + np.pi / 6.
    lon = r * np.cos(theta)
    lat = r * np.sin(theta)
    time = np.array([0, 86400], dtype=np.float64)
    grid = CurvilinearZGrid(lon, lat, time=time)

    u_data = np.ones((2, y.size, x.size), dtype=np.float32)
    v_data = np.zeros((2, y.size, x.size), dtype=np.float32)
    u_data[0, :, :] = lon[:, :] + lat[:, :]
    u_field = Field('U', u_data, grid=grid, transpose=False)
    v_field = Field('V', v_data, grid=grid, transpose=False)
    field_set = FieldSet(u_field, v_field)

    def sampleSpeed(particle, fieldset, time):
        u = fieldset.U[time, particle.depth, particle.lat, particle.lon]
        v = fieldset.V[time, particle.depth, particle.lat, particle.lon]
        particle.speed = math.sqrt(u * u + v * v)

    class MyParticle(ptype[mode]):
        speed = Variable('speed', dtype=np.float32, initial=0.)

    pset = ParticleSet.from_list(field_set, MyParticle, lon=[400, -200], lat=[600, 600])
    pset.execute(pset.Kernel(sampleSpeed), runtime=0, dt=0)
    assert np.allclose(pset[0].speed, 1000)
""" Defining the particle set """ rho_pls = [ 30, 30, 30, 30, 30, 840, 840, 840, 840, 840, 920, 920, 920, 920, 920 ] # add/remove here if more needed r_pls = [ 1e-3, 1e-4, 1e-5, 1e-6, 1e-7, 1e-3, 1e-4, 1e-5, 1e-6, 1e-7, 1e-3, 1e-4, 1e-5, 1e-6, 1e-7 ] # add/remove here if more needed pset = ParticleSet.from_list( fieldset=fieldset, # the fields on which the particles are advected pclass= plastic_particle, # the type of particles (JITParticle or ScipyParticle) lon=lon_release, #-160., # a vector of release longitudes lat=lat_release, #36., time=np.datetime64('%s-%s-05' % (yr1, mon)), depth=z_release, r_pl=r_pls[0] * np.ones(np.array(lon_release).size), rho_pl=rho_pls[0] * np.ones(np.array(lon_release).size), r_tot=r_pls[0] * np.ones(np.array(lon_release).size), rho_tot=rho_pls[0] * np.ones(np.array(lon_release).size)) for r_pl, rho_pl in zip(r_pls[1:], rho_pls[1:]): pset.add( ParticleSet.from_list( fieldset=fieldset, # the fields on which the particles are advected pclass= plastic_particle, # the type of particles (JITParticle or ScipyParticle) lon=lon_release, #-160., # a vector of release longitudes lat=lat_release, #36., time=np.datetime64('%s-%s-05' % (yr1, mon)),
def test_periodic(mode, time_periodic, dt_sign):
    lon = np.array([0, 1], dtype=np.float32)
    lat = np.array([0, 1], dtype=np.float32)
    depth = np.array([0, 1], dtype=np.float32)
    tsize = 24 * 60 + 1
    period = 86400
    time = np.linspace(0, period, tsize, dtype=np.float64)

    def temp_func(time):
        return 20 + 2 * np.sin(time * 2 * np.pi / period)

    temp_vec = temp_func(time)

    U = np.zeros((2, 2, 2, tsize), dtype=np.float32)
    V = np.zeros((2, 2, 2, tsize), dtype=np.float32)
    V[0, 0, 0, :] = 1e-5
    W = np.zeros((2, 2, 2, tsize), dtype=np.float32)
    temp = np.zeros((2, 2, 2, tsize), dtype=np.float32)
    temp[:, :, :, :] = temp_vec
    data = {'U': U, 'V': V, 'W': W, 'temp': temp}
    dimensions = {'lon': lon, 'lat': lat, 'depth': depth, 'time': time}

    fieldset = FieldSet.from_data(data, dimensions, mesh='flat',
                                  time_periodic=time_periodic, transpose=True,
                                  allow_time_extrapolation=True)

    def sampleTemp(particle, fieldset, time):
        # Note that fieldset.temp is interpolated at time=time+dt.
        # Indeed, sampleTemp is called at time=time, but the result is written
        # at time=time+dt, after the Kernel update
        particle.temp = fieldset.temp[time + particle.dt, particle.depth, particle.lat, particle.lon]
        # test if we can interpolate UV and UVW together
        (particle.u1, particle.v1) = fieldset.UV[time + particle.dt, particle.depth, particle.lat, particle.lon]
        (particle.u2, particle.v2, w_) = fieldset.UVW[time + particle.dt, particle.depth, particle.lat, particle.lon]

    class MyParticle(ptype[mode]):
        temp = Variable('temp', dtype=np.float32, initial=20.)
        u1 = Variable('u1', dtype=np.float32, initial=0.)
        u2 = Variable('u2', dtype=np.float32, initial=0.)
        v1 = Variable('v1', dtype=np.float32, initial=0.)
        v2 = Variable('v2', dtype=np.float32, initial=0.)

    pset = ParticleSet.from_list(fieldset, pclass=MyParticle, lon=[0.5], lat=[0.5], depth=[0.5])
    pset.execute(AdvectionRK4_3D + pset.Kernel(sampleTemp),
                 runtime=delta(hours=51), dt=delta(hours=dt_sign * 1))

    if time_periodic is not False:
        t = pset.time[0]
        temp_theo = temp_func(t)
    elif dt_sign == 1:
        temp_theo = temp_vec[-1]
    elif dt_sign == -1:
        temp_theo = temp_vec[0]
    assert np.allclose(temp_theo, pset.temp[0], atol=1e-5)
    assert np.allclose(pset.u1[0], pset.u2[0])
    assert np.allclose(pset.v1[0], pset.v2[0])
def test_multi_structured_grids(mode):

    def temp_func(lon, lat):
        return 20 + lat / 1000. + 2 * np.sin(lon * 2 * np.pi / 5000.)

    a = 10000
    b = 10000

    # Grid 0
    xdim_g0 = 201
    ydim_g0 = 201
    # Coordinates of the test fieldset (on A-grid in deg)
    lon_g0 = np.linspace(0, a, xdim_g0, dtype=np.float32)
    lat_g0 = np.linspace(0, b, ydim_g0, dtype=np.float32)
    time_g0 = np.linspace(0., 1000., 2, dtype=np.float64)
    grid_0 = RectilinearZGrid(lon_g0, lat_g0, time=time_g0)

    # Grid 1
    xdim_g1 = 51
    ydim_g1 = 51
    # Coordinates of the test fieldset (on A-grid in deg)
    lon_g1 = np.linspace(0, a, xdim_g1, dtype=np.float32)
    lat_g1 = np.linspace(0, b, ydim_g1, dtype=np.float32)
    time_g1 = np.linspace(0., 1000., 2, dtype=np.float64)
    grid_1 = RectilinearZGrid(lon_g1, lat_g1, time=time_g1)

    u_data = np.ones((lon_g0.size, lat_g0.size, time_g0.size), dtype=np.float32)
    u_data = 2 * u_data
    u_field = Field('U', u_data, grid=grid_0, transpose=True)

    temp0_data = np.empty((lon_g0.size, lat_g0.size, time_g0.size), dtype=np.float32)
    for i in range(lon_g0.size):
        for j in range(lat_g0.size):
            temp0_data[i, j, :] = temp_func(lon_g0[i], lat_g0[j])
    temp0_field = Field('temp0', temp0_data, grid=grid_0, transpose=True)

    v_data = np.zeros((lon_g1.size, lat_g1.size, time_g1.size), dtype=np.float32)
    v_field = Field('V', v_data, grid=grid_1, transpose=True)

    temp1_data = np.empty((lon_g1.size, lat_g1.size, time_g1.size), dtype=np.float32)
    for i in range(lon_g1.size):
        for j in range(lat_g1.size):
            temp1_data[i, j, :] = temp_func(lon_g1[i], lat_g1[j])
    temp1_field = Field('temp1', temp1_data, grid=grid_1, transpose=True)

    other_fields = {}
    other_fields['temp0'] = temp0_field
    other_fields['temp1'] = temp1_field
    field_set = FieldSet(u_field, v_field, fields=other_fields)

    def sampleTemp(particle, fieldset, time, dt):
        # Note that fieldset.temp is interpolated at time=time+dt.
        # Indeed, sampleTemp is called at time=time, but the result is written
        # at time=time+dt, after the Kernel update
        particle.temp0 = fieldset.temp0[time + dt, particle.lon, particle.lat, particle.depth]
        particle.temp1 = fieldset.temp1[time + dt, particle.lon, particle.lat, particle.depth]

    class MyParticle(ptype[mode]):
        temp0 = Variable('temp0', dtype=np.float32, initial=20.)
        temp1 = Variable('temp1', dtype=np.float32, initial=20.)

    pset = ParticleSet.from_list(field_set, MyParticle, lon=[3001], lat=[5001])
    pset.execute(AdvectionRK4 + pset.Kernel(sampleTemp), runtime=1, dt=1)
    assert np.allclose(pset.particles[0].temp0, pset.particles[0].temp1, atol=1e-3)
def SampleTemp(particle, fieldset, time):
    particle.temp = fieldset.temp[time, particle.depth, particle.lat, particle.lon]


def SampleBathy(particle, fieldset, time):
    particle.bathy = fieldset.bathy[0, 0, particle.lat, particle.lon]


time0 = fieldset.U.grid.time[0]
time = [time0] * len(lon)
pset = ParticleSet.from_list(fieldset, pclass=SampleParticle, lon=lon, lat=lat,
                             time=time, repeatdt=repeatdt)
pfile = pset.ParticleFile(out_file, outputdt=delta(days=1))
kernels = (pset.Kernel(AdvectionRK4) + SampleAge + SampleTemp + SampleBathy +
           SampleDistance + BrownianMotion2D)
pset.execute(kernels, dt=delta(minutes=5), output_file=pfile,
             verbose_progress=True,
             recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle},
             endtime=end_time)
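# The recovery dict above refers to a DeleteParticle kernel that is not shown
# in this snippet. A minimal version, matching the pattern used in the other
# examples in this collection (assumed here, not the author's exact code),
# simply removes a particle that triggers an out-of-bounds error:
def DeleteParticle(particle, fieldset, time):
    particle.delete()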
def test_nemo_grid(mode):
    data_path = path.join(path.dirname(__file__), 'test_data/')

    mesh_filename = data_path + 'mask_nemo_cross_180lon.nc'
    rotation_angles_filename = data_path + 'rotation_angles_nemo_cross_180lon.nc'
    compute_curvilinearGrid_rotationAngles(mesh_filename, rotation_angles_filename)

    filenames = {'U': data_path + 'Uu_eastward_nemo_cross_180lon.nc',
                 'V': data_path + 'Vv_eastward_nemo_cross_180lon.nc',
                 'cosU': rotation_angles_filename,
                 'sinU': rotation_angles_filename,
                 'cosV': rotation_angles_filename,
                 'sinV': rotation_angles_filename}
    variables = {'U': 'U',
                 'V': 'V',
                 'cosU': 'cosU',
                 'sinU': 'sinU',
                 'cosV': 'cosV',
                 'sinV': 'sinV'}
    dimensions = {'U': {'lon': 'nav_lon_u', 'lat': 'nav_lat_u'},
                  'V': {'lon': 'nav_lon_v', 'lat': 'nav_lat_v'},
                  'cosU': {'lon': 'glamu', 'lat': 'gphiu'},
                  'sinU': {'lon': 'glamu', 'lat': 'gphiu'},
                  'cosV': {'lon': 'glamv', 'lat': 'gphiv'},
                  'sinV': {'lon': 'glamv', 'lat': 'gphiv'}}
    field_set = FieldSet.from_netcdf(filenames, variables, dimensions, mesh='spherical')

    def sampleVel(particle, fieldset, time, dt):
        (particle.zonal, particle.meridional) = fieldset.UV[time, particle.lon, particle.lat, particle.depth]

    class MyParticle(ptype[mode]):
        zonal = Variable('zonal', dtype=np.float32, initial=0.)
        meridional = Variable('meridional', dtype=np.float32, initial=0.)

    lonp = 175.5
    latp = 81.5
    pset = ParticleSet.from_list(field_set, MyParticle, lon=[lonp], lat=[latp])
    pset.execute(pset.Kernel(sampleVel), runtime=0, dt=0)
    u = field_set.U.units.to_source(pset[0].zonal, lonp, latp, 0)
    v = field_set.V.units.to_source(pset[0].meridional, lonp, latp, 0)
    assert abs(u - 1) < 1e-4
    assert abs(v) < 1e-4
def test_cgrid_uniform_3dvel_spherical(mode, vert_mode, time):
    data_path = path.join(path.dirname(__file__), 'test_data/')
    dim_file = xr.open_dataset(data_path + 'mask_nemo_cross_180lon.nc')
    u_file = xr.open_dataset(data_path + 'Uu_eastward_nemo_cross_180lon.nc')
    v_file = xr.open_dataset(data_path + 'Vv_eastward_nemo_cross_180lon.nc')
    j = 4
    i = 11
    lon = np.array(dim_file.glamf[0, j:j + 2, i:i + 2])
    lat = np.array(dim_file.gphif[0, j:j + 2, i:i + 2])
    U = np.array(u_file.U[0, j:j + 2, i:i + 2])
    V = np.array(v_file.V[0, j:j + 2, i:i + 2])
    trash = np.zeros((2, 2))
    U = np.stack((U, trash))
    V = np.stack((V, trash))
    w0 = 1
    w1 = 1
    W = np.array([[[-99, -99], [-99, w0]], [[-99, -99], [-99, w1]]])

    if vert_mode == 'zlev':
        depth = np.array([0, 1])
    elif vert_mode == 'slev1':
        depth = np.array([[[0, 0], [0, 0]], [[1, 1], [1, 1]]])

    if time:
        U = np.stack((U, U))
        V = np.stack((V, V))
        W = np.stack((W, W))
        dimensions = {'lat': lat, 'lon': lon, 'depth': depth, 'time': np.array([0, 10])}
    else:
        dimensions = {'lat': lat, 'lon': lon, 'depth': depth}

    data = {'U': np.array(U, dtype=np.float32),
            'V': np.array(V, dtype=np.float32),
            'W': np.array(W, dtype=np.float32)}
    fieldset = FieldSet.from_data(data, dimensions, mesh='spherical')
    fieldset.U.interp_method = 'cgrid_velocity'
    fieldset.V.interp_method = 'cgrid_velocity'
    fieldset.W.interp_method = 'cgrid_velocity'

    def sampleVel(particle, fieldset, time):
        (particle.zonal, particle.meridional, particle.vertical) = fieldset.UVW[time, particle.depth, particle.lat, particle.lon]

    class MyParticle(ptype[mode]):
        zonal = Variable('zonal', dtype=np.float32, initial=0.)
        meridional = Variable('meridional', dtype=np.float32, initial=0.)
        vertical = Variable('vertical', dtype=np.float32, initial=0.)

    lonp = 179.8
    latp = 81.35
    pset = ParticleSet.from_list(fieldset, MyParticle, lon=lonp, lat=latp, depth=.2)
    pset.execute(pset.Kernel(sampleVel), runtime=0, dt=0)
    pset[0].zonal = fieldset.U.units.to_source(pset[0].zonal, lonp, latp, 0)
    pset[0].meridional = fieldset.V.units.to_source(pset[0].meridional, lonp, latp, 0)
    assert abs(pset[0].zonal - 1) < 1e-3
    assert abs(pset[0].meridional) < 1e-3
    assert abs(pset[0].vertical - 1) < 1e-3
def test_popgrid(mode, vert_discretisation, deferred_load):
    mesh = path.join(path.join(path.dirname(__file__), 'test_data'), 'POPtestdata_time.nc')
    if vert_discretisation == 'zlevel':
        w_dep = 'w_dep'
    elif vert_discretisation == 'slevel':
        w_dep = 'w_deps'   # same as zlevel, but defined as slevel
    elif vert_discretisation == 'slevel2':
        w_dep = 'w_deps2'  # contains shaved cells

    filenames = mesh
    variables = {'U': 'U', 'V': 'V', 'W': 'W', 'T': 'T'}
    dimensions = {'lon': 'lon', 'lat': 'lat', 'depth': w_dep, 'time': 'time'}
    field_set = FieldSet.from_pop(filenames, variables, dimensions, mesh='flat',
                                  deferred_load=deferred_load)

    def sampleVel(particle, fieldset, time):
        (particle.zonal, particle.meridional, particle.vert) = fieldset.UVW[time, particle.depth, particle.lat, particle.lon]
        particle.tracer = fieldset.T[time, particle.depth, particle.lat, particle.lon]

    def OutBoundsError(particle, fieldset, time):
        particle.out_of_bounds = 1
        particle.depth -= 3

    class MyParticle(ptype[mode]):
        zonal = Variable('zonal', dtype=np.float32, initial=0.)
        meridional = Variable('meridional', dtype=np.float32, initial=0.)
        vert = Variable('vert', dtype=np.float32, initial=0.)
        tracer = Variable('tracer', dtype=np.float32, initial=0.)
        out_of_bounds = Variable('out_of_bounds', dtype=np.float32, initial=0.)

    pset = ParticleSet.from_list(field_set, MyParticle, lon=[3, 5, 1], lat=[3, 5, 1], depth=[3, 7, 11])
    pset.execute(pset.Kernel(sampleVel), runtime=1, dt=1,
                 recovery={ErrorCode.ErrorOutOfBounds: OutBoundsError})

    if vert_discretisation == 'slevel2':
        assert np.isclose(pset.vert[0], 0.)
        assert np.isclose(pset.zonal[0], 0.)
        assert np.isclose(pset.tracer[0], 99.)
        assert np.isclose(pset.vert[1], -0.0066666666)
        assert np.isclose(pset.zonal[1], .015)
        assert np.isclose(pset.tracer[1], 1.)
        assert pset.out_of_bounds[0] == 0
        assert pset.out_of_bounds[1] == 0
        assert pset.out_of_bounds[2] == 1
    else:
        assert np.allclose(pset.zonal, 0.015)
        assert np.allclose(pset.meridional, 0.01)
        assert np.allclose(pset.vert, -0.01)
        assert np.allclose(pset.tracer, 1)
def compute_swash_particle_advection(field_set, mode, lonp, latp, depthp):
    pset = ParticleSet.from_list(field_set, ptype[mode], lon=lonp, lat=latp, depth=depthp)
    pfile = ParticleFile("swash_particles_chunk", pset, outputdt=delta(seconds=0.05))
    pset.execute(AdvectionRK4, runtime=delta(seconds=0.2), dt=delta(seconds=0.005),
                 output_file=pfile)
    return pset
def compute_pop_particle_advection(field_set, mode, lonp, latp):
    pset = ParticleSet.from_list(field_set, ptype[mode], lon=lonp, lat=latp)
    pfile = ParticleFile("globcurrent_particles_chunk", pset, outputdt=delta(days=15))
    pset.execute(AdvectionRK4, runtime=delta(days=90), dt=delta(days=2),
                 output_file=pfile)
    return pset
def get_traj_with_parcels(date, runtime, delta_time, particle_grid_step, stream_data_fname):
    """Compute trajectories of particles in the sea using the parcels library.

    Compute trajectories of particles in the sea at a given date, in a static
    2D stream field. Trajectories are saved in a .nc file.

    Args:
        date (int): Day, in number of days relative to the data time origin,
            at which the stream data should be taken.
        runtime (int): Total duration in hours of the field integration.
            Trajectory length increases with the runtime.
        delta_time (int): Time step in hours of the integration.
        particle_grid_step (int): Grid step size for the initial positions of
            the particles. The unit is the data index step, i.e. data dx and dy.
        stream_data_fname (str): Complete name of the stream data file.

    Returns:
        stream_line_list (list of classes.StreamLine): The list of trajectories.

    Notes:
        The input file is expected to contain the daily mean fields of east- and
        northward ocean current velocity (uo, vo) in a format as described here:
        http://marine.copernicus.eu/documents/PUM/CMEMS-GLO-PUM-001-024.pdf.
    """
    # Loading data
    data_set = nc.Dataset(stream_data_fname)
    u_1day = data_set['uo'][date, 0, :]
    v_1day = data_set['vo'][date, 0, :]

    # Data sizes
    data_time_size, data_depth_size, data_lat_size, data_lon_size = np.shape(data_set['uo'])
    longitudes = np.array(data_set['longitude'])
    latitudes = np.array(data_set['latitude'])

    # Replace the mask (i.e. the ground areas) with a null vector field.
    U = np.array(u_1day)
    U[U == -32767] = 0
    V = np.array(v_1day)
    V[V == -32767] = 0

    # Initialize a field set using the data set.
    data = {'U': U, 'V': V}
    dimensions = {'lon': longitudes, 'lat': latitudes}
    fieldset = FieldSet.from_data(data, dimensions, mesh='spherical')
    fieldset.U.interp_method = 'cgrid_velocity'
    fieldset.V.interp_method = 'cgrid_velocity'

    # List of initial positions of the particles. Particles on the ground are removed.
    init_pos = []
    for lon in range(0, data_lon_size, particle_grid_step):
        for lat in range(0, data_lat_size, particle_grid_step):
            if u_1day[lat, lon] is not np.ma.masked:
                init_pos.append([longitudes[lon], latitudes[lat]])
    init_pos = np.array(init_pos)
    init_longitudes = init_pos[:, 0]
    init_latitudes = init_pos[:, 1]

    # Initialize particle set
    pSet = ParticleSet.from_list(fieldset, ScipyParticle, init_longitudes, init_latitudes,
                                 depth=None, time=None)

    # Initialize output file and run simulation
    def DeleteParticle(particle, fieldset, time):
        particle.delete()

    output = pSet.ParticleFile(name="trajectories_temp.nc", outputdt=delta(hours=delta_time))
    pSet.execute(AdvectionRK4, runtime=delta(hours=runtime), dt=np.inf,
                 output_file=output,
                 recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle})
    output.close()

    # Load simulation results and create the stream line list
    nc_traj = nc.Dataset("trajectories_temp.nc")
    trajectories = np.zeros((nc_traj.dimensions['traj'].size, nc_traj.dimensions['obs'].size, 2))
    trajectories[:, :, 0] = np.array(nc_traj['lon'])
    trajectories[:, :, 1] = np.array(nc_traj['lat'])
    stream_line_list = []
    for trajectory in trajectories:
        stream_line_list.append(StreamLine(trajectory[np.isfinite(trajectory[:, 0])],
                                           delta_time * 3600))

    # Clean working dir
    os.system("rm -r trajectories_temp*")

    return stream_line_list
    prevlon = Variable('prevlon', dtype=np.float64, initial=0., to_write=False)
    prevlat = Variable('prevlat', dtype=np.float64, initial=0., to_write=False)
    prevdep = Variable('prevdep', dtype=np.float64, initial=0., to_write=False)


if options['hdiffusion'] == False and options['vdiffusion'] == False:
    npart = 1

lon0 = np.zeros(npart * len(options['x0']))
lat0 = np.zeros(npart * len(options['x0']))
depth0 = np.zeros(npart * len(options['x0']))
for i in range(0, len(options['x0'])):
    lon0[i * npart:(i + 1) * npart] = options['x0'][i]
    lat0[i * npart:(i + 1) * npart] = options['y0'][i]
    depth0[i * npart:(i + 1) * npart] = options['z0'][i]

repeatdt = delta(minutes=options['repeatdt'])  # release particles every...

pset = ParticleSet.from_list(fieldset=fieldset,
                             pclass=ForCoastParticle,
                             lon=lon0,
                             lat=lat0,
                             depth=depth0,
                             time=np.datetime64(options['sdate']),
                             repeatdt=repeatdt,
                             lonlatdepth_dtype=np.float64)

# CHOOSE KERNELS
if options['stokes']:
    kernels = pset.Kernel(Dbl_AdvectionRK4_3D_clumsy) if options['run3D'] else pset.Kernel(Dbl_AdvectionRK4)
else:
    kernels = pset.Kernel(AdvectionRK4_3D) if options['run3D'] else pset.Kernel(AdvectionRK4, delete_cfiles=False)

if options['hdiffusion']:
    print('Using horizontal diffusion')
    kernels += DiffusionUniformKh

if options['run3D'] and options['vdiffusion']:
    print('Using vertical diffusion')
def parcels_particle_run(k, v):
    L.info(f'Received Input: {v}')

    L.info("Loading vectors from model...")
    datapath = Path(__file__).parent.parent / 'data'
    with xr.open_mfdataset(str(datapath / 'ncom_socal' / 'socal_2017-01-0[1-9].nc'),
                           decode_cf=False) as undecoded:
        time_units = undecoded.time.units
        time_calendar = undecoded.time.calendar

    with xr.open_mfdataset(str(datapath / 'ncom_socal' / 'socal_2017-01-0[1-9].nc')) as ds:
        uv = ds[['water_u', 'water_v']]
        uv = uv.isel(depth=0)

    variables = {'U': 'water_u', 'V': 'water_v'}
    dimensions = {'lat': 'lat', 'lon': 'lon', 'time': 'time'}
    xr_fieldset = FieldSet.from_xarray_dataset(uv, variables, dimensions,
                                               allow_time_extrapolation=True)
    L.info("Created Fieldset")

    # Expand each record into `nparticles` identical release positions
    expanded_rows = []
    df = pd.DataFrame.from_records(v['records'])
    for ix, row in df.iterrows():
        expanded_rows.append(pd.concat([row] * int(row.nparticles), ignore_index=True, axis=1).T)
    expanded_df = pd.concat(expanded_rows, axis=0).reset_index()
    expanded_df = expanded_df.drop(columns=['index', 'nparticles'])
    L.info(f"Expanded and Loaded {len(expanded_df)} Particles!")

    L.info("Converting particle release time to the model base unit...")
    expanded_df.time = cftime.date2num(pd.DatetimeIndex(expanded_df.time).to_pydatetime(),
                                       units=time_units,
                                       calendar=time_calendar)

    csv_pset = ParticleSet.from_list(fieldset=xr_fieldset,
                                     pclass=JITParticle,
                                     lon=expanded_df.lon,
                                     lat=expanded_df.lat,
                                     time=expanded_df.time.values)
    L.info("Created ParticleSet")

    runlength = timedelta(days=6)
    timestep = timedelta(minutes=5)
    rightnow = datetime.utcnow()
    output_file = datapath / 'particle_runs' / f'{v["id"]}_{rightnow:%Y%m%dT%H%M%S.%f}.nc'
    output_file.parent.mkdir(exist_ok=True, parents=True)
    output = csv_pset.ParticleFile(name=str(output_file), outputdt=timedelta(hours=3))

    L.info("Running...")
    csv_pset.execute(AdvectionRK4, runtime=runlength, dt=timestep, output_file=output)
    L.info(f'Complete! Saved output: {output_file}')

    kafka_base = 'kafka-int'
    p = EasyAvroProducer(schema_registry_url=f'http://{kafka_base}:7002',
                         kafka_brokers=[f'{kafka_base}:7001'],
                         kafka_topic='mil-darpa-oot-particle-completed',
                         key_schema='nokey')
    to_send = [(None, {'id': v['id'], 'filepath': str(output_file)})]
    p.produce(to_send)
    L.info("Sent simulation completed message")
def deleteParticle(particle, fieldset, time):
    # Recovery kernel to delete a particle if an error occurs
    particle.delete()

##############################################################################
# INITIALISE SIMULATION AND RUN                                              #
##############################################################################

pset = ParticleSet.from_list(fieldset=fieldset,
                             pclass=debris,
                             lonlatdepth_dtype=np.float64,
                             lon=particles['pos']['lon'],
                             lat=particles['pos']['lat'],
                             time=particles['pos']['time'],
                             rp0=particles['pos']['rp0'],
                             cp0=particles['pos']['cp0'],
                             source_id=particles['pos']['iso'],
                             source_cell=particles['pos']['id'])

print(str(len(particles['pos']['time'])) + ' particles released!')

if param['test']:
    traj = pset.ParticleFile(name=fh['traj'], outputdt=param['dt_out'])
else:
    traj = pset.ParticleFile(name=fh['traj'], write_ondelete=True)

kernels = (pset.Kernel(AdvectionRK4) + pset.Kernel(periodicBC) +
           pset.Kernel(antibeach) + pset.Kernel(event))
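# periodicBC, antibeach and event are project-specific kernels that are not
# defined in this snippet. As an illustration only (an assumption, not the
# author's implementation), a zonally periodic boundary kernel in Parcels
# typically wraps the longitude using halo constants registered beforehand,
# e.g. with fieldset.add_constant('halo_west', ...) / ('halo_east', ...) and
# fieldset.add_periodic_halo(zonal=True):
def periodicBC(particle, fieldset, time):
    if particle.lon < fieldset.halo_west:
        particle.lon += fieldset.halo_east - fieldset.halo_west
    elif particle.lon > fieldset.halo_east:
        particle.lon -= fieldset.halo_east - fieldset.halo_west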
def test_cgrid_uniform_3dvel(mode, vert_mode, time):
    lon = np.array([[0, 2], [.4, 1.5]])
    lat = np.array([[0, -.5], [.8, .5]])
    u0 = 4.4721359549995793e-01
    u1 = 1.3416407864998738e+00
    v0 = 1.2126781251816650e+00
    v1 = 1.2278812270298409e+00
    w0 = 1
    w1 = 1

    if vert_mode == 'zlev':
        depth = np.array([0, 1])
    elif vert_mode == 'slev1':
        depth = np.array([[[0, 0], [0, 0]], [[1, 1], [1, 1]]])
    elif vert_mode == 'slev2':
        depth = np.array([[[-1, -.6], [-1.1257142857142859, -.9]],
                          [[1, 1.5], [0.50857142857142845, .8]]])
        w0 = 1.0483007922296661e+00
        w1 = 1.3098951476312375e+00

    U = np.array([[[-99, -99], [u0, u1]], [[-99, -99], [-99, -99]]])
    V = np.array([[[-99, v0], [-99, v1]], [[-99, -99], [-99, -99]]])
    W = np.array([[[-99, -99], [-99, w0]], [[-99, -99], [-99, w1]]])

    if time:
        U = np.stack((U, U))
        V = np.stack((V, V))
        W = np.stack((W, W))
        dimensions = {'lat': lat, 'lon': lon, 'depth': depth, 'time': np.array([0, 10])}
    else:
        dimensions = {'lat': lat, 'lon': lon, 'depth': depth}

    data = {'U': np.array(U, dtype=np.float32),
            'V': np.array(V, dtype=np.float32),
            'W': np.array(W, dtype=np.float32)}
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat')
    fieldset.U.interp_method = 'cgrid_velocity'
    fieldset.V.interp_method = 'cgrid_velocity'
    fieldset.W.interp_method = 'cgrid_velocity'

    def sampleVel(particle, fieldset, time):
        (particle.zonal, particle.meridional, particle.vertical) = fieldset.UVW[time, particle.depth, particle.lat, particle.lon]

    class MyParticle(ptype[mode]):
        zonal = Variable('zonal', dtype=np.float32, initial=0.)
        meridional = Variable('meridional', dtype=np.float32, initial=0.)
        vertical = Variable('vertical', dtype=np.float32, initial=0.)

    pset = ParticleSet.from_list(fieldset, MyParticle, lon=.7, lat=.3, depth=.2)
    pset.execute(pset.Kernel(sampleVel), runtime=0, dt=0)
    assert abs(pset[0].zonal - 1) < 1e-6
    assert abs(pset[0].meridional - 1) < 1e-6
    assert abs(pset[0].vertical - 1) < 1e-6
# field_set.U.show()

# Make particles initial position list
nc_fid = Dataset(grid_file, 'r')  # open grid file nc to read
# lats = nc_fid.variables['nav_lat'][:]  # extract/copy the data
# lons = nc_fid.variables['nav_lon'][:]
# lonE = lons[:, 169-3]
# latE = lats[:, 169-3]

npart = 30
lonp = [i for i in np.linspace(-88.18, -88.20, npart)]
latp = [i for i in np.linspace(17.52, 17.53, npart)]  # this makes a list!

pset = ParticleSet.from_list(field_set, JITParticle, lon=lonp, lat=latp)
pfile = ParticleFile("tBelize_nemo_particles_30halfD", pset, outputdt=delta(hours=0.5))
kernels = pset.Kernel(AdvectionRK4)

# Plot initial positions
# pset.show()

pset.execute(kernels, runtime=delta(days=0.5), dt=delta(hours=0.1), output_file=pfile)

# plotTrajectoriesFile("Belize_nemo_particles_t2.nc")
# pset.show(domain={'N': -31, 'S': -35, 'E': 33, 'W': 26})
# pset.show(field=field_set.U)
# pset.show(field=fieldset.U, show_time=datetime(2002, 1, 10, 2))
# pset.show(field=fieldset.U, show_time=datetime(2002, 1, 10, 2), with_particles=False)

end = time.time()
    pospartic[0, 0] = -13.072
    for i in range(1, numpart):
        if numpart == 1:
            break
        pospartic[i, 0] = pospartic[0, 0] - i * 0.02
    pospartic[:, 1] = -38.45
    return pospartic


pospartic = criaPosPartic()

# Defining the particles
pset = ParticleSet.from_list(fieldset=fset,
                             pclass=JITParticle,
                             lat=pospartic[:, 0],
                             lon=pospartic[:, 1],
                             repeatdt=timedelta(hours=12))

#############################################################################
############################## Creating output ###############################
#############################################################################

# Defining what to do in a recovery condition
def OutOfBounds(particle, fieldset, time):
    particle.delete()


output_file = pset.ParticleFile(name=outname, outputdt=timedelta(seconds=3600))
fieldset = FieldSet(U=fieldset_ocean.U + fieldset_wave.U,
                    V=fieldset_ocean.V + fieldset_wave.V)


def deleteParticle(particle, fieldset, time):
    # Recovery kernel to delete a particle if it leaves the domain
    # (possible in certain configurations)
    particle.delete()

##############################################################################
# INITIALISE SIMULATION AND RUN                                              #
##############################################################################

pset = ParticleSet.from_list(fieldset=fieldset,
                             pclass=JITParticle,
                             lon=lon0,
                             lat=lat0,
                             time=t0)

traj = pset.ParticleFile(name=fh['traj'], outputdt=param['dt_out'])

kernels = (pset.Kernel(AdvectionRK4))

pset.execute(kernels,
             endtime=param['endtime'],
             dt=param['dt_RK4'],
             recovery={ErrorCode.ErrorOutOfBounds: deleteParticle},
             output_file=traj)

traj.export()
def test_advect_nemo(mode):
    data_path = path.join(path.dirname(__file__), 'test_data/')

    mesh_filename = data_path + 'mask_nemo_cross_180lon.nc'
    rotation_angles_filename = data_path + 'rotation_angles_nemo_cross_180lon.nc'
    variables = {'cosU': 'cosU', 'sinU': 'sinU', 'cosV': 'cosV', 'sinV': 'sinV'}
    dimensions = {'U': {'lon': 'glamu', 'lat': 'gphiu'},
                  'V': {'lon': 'glamv', 'lat': 'gphiv'},
                  'F': {'lon': 'glamf', 'lat': 'gphif'}}
    compute_curvilinearGrid_rotationAngles(mesh_filename, rotation_angles_filename,
                                           variables, dimensions)

    filenames = {'U': data_path + 'Uu_eastward_nemo_cross_180lon.nc',
                 'V': data_path + 'Vv_eastward_nemo_cross_180lon.nc',
                 'cosU': rotation_angles_filename,
                 'sinU': rotation_angles_filename,
                 'cosV': rotation_angles_filename,
                 'sinV': rotation_angles_filename}
    variables = {'U': 'U',
                 'V': 'V',
                 'cosU': 'cosU',
                 'sinU': 'sinU',
                 'cosV': 'cosV',
                 'sinV': 'sinV'}
    dimensions = {'U': {'lon': 'nav_lon_u', 'lat': 'nav_lat_u'},
                  'V': {'lon': 'nav_lon_v', 'lat': 'nav_lat_v'},
                  'cosU': {'lon': 'glamu', 'lat': 'gphiu'},
                  'sinU': {'lon': 'glamu', 'lat': 'gphiu'},
                  'cosV': {'lon': 'glamv', 'lat': 'gphiv'},
                  'sinV': {'lon': 'glamv', 'lat': 'gphiv'}}
    field_set = FieldSet.from_netcdf(filenames, variables, dimensions, mesh='spherical',
                                     allow_time_extrapolation=True)

    lonp = 175.5
    latp = 81.5
    pset = ParticleSet.from_list(field_set, ptype[mode], lon=[lonp], lat=[latp])
    pset.execute(AdvectionRK4, runtime=delta(days=2), dt=delta(hours=6))
    assert abs(pset[0].lat - latp) < 1e-3
    particle.delete()


recovery = {ErrorCode.ErrorOutOfBounds: DeleteParticle,
            ErrorCode.ErrorThroughSurface: DeleteParticle}

# Cover the domain in particles
lon = np.linspace(ds.lon_rho.min(), ds.lon_rho.max(), num=2**6)
lat = np.linspace(ds.lat_rho.min(), ds.lat_rho.max(), num=2**6)
lons, lats = np.meshgrid(lon, lat)

pset = ParticleSet.from_list(fieldset=fieldset,
                             pclass=JITParticle,
                             time=ds.ocean_time.values[0],
                             lon=lons,
                             lat=lats,
                             depth=depth)

kernels = AdvectionRK4

output_file = pset.ParticleFile(name="/work/wtorres/temp/reefParticles",
                                outputdt=delta(seconds=60))

pset.execute(kernels, runtime=delta(hours=12.0), dt=delta(seconds=60),
             output_file=output_file, recovery=recovery)

output_file.export()
output_file.close()
def deleteParticle(particle, fieldset, time):
    # Recovery kernel to delete a particle if an error occurs
    particle.delete()

##############################################################################
# INITIALISE SIMULATION AND RUN                                              #
##############################################################################

pset = ParticleSet.from_list(fieldset=fieldset,
                             pclass=debris,
                             lonlatdepth_dtype=np.float64,
                             lon=particles['pos']['lon'],
                             lat=particles['pos']['lat'],
                             time=particles['pos']['time'],
                             gfw_num=particles['pos']['gfw'],
                             lon0=particles['pos']['lon'],
                             lat0=particles['pos']['lat'])

print(str(len(particles['pos']['time'])) + ' particles released!')

if param['test']:
    traj = pset.ParticleFile(name=fh['traj'], outputdt=param['dt_out'])
else:
    traj = pset.ParticleFile(name=fh['traj'], write_ondelete=True)

kernels = (pset.Kernel(AdvectionRK4) + pset.Kernel(periodicBC) +
           pset.Kernel(antibeach) + pset.Kernel(event))
    xsi*eta * latCorners[2] + (1-xsi)*eta * latCorners[3]

lonCorners = [1.37941658, 1.63887346, 1.67183721, 1.41217935]
latCorners = [51.58309555, 51.56196213, 51.71636581, 51.73773575]
lon_t = (1-xsi)*(1-eta) * lonCorners[0] + xsi*(1-eta) * lonCorners[1] + \
    xsi*eta * lonCorners[2] + (1-xsi)*eta * lonCorners[3]
lat_t = (1-xsi)*(1-eta) * latCorners[0] + xsi*(1-eta) * latCorners[1] + \
    xsi*eta * latCorners[2] + (1-xsi)*eta * latCorners[3]
lons = np.concatenate((lon_r.flatten(), lon_t.flatten()))
lats = np.concatenate((lat_r.flatten(), lat_t.flatten()))

times = np.arange(np.datetime64('2000-01-05'), np.datetime64('2001-01-05'))

pset = ParticleSet.from_list(fieldset, PlasticParticle,
                             lon=np.tile(lons, [len(times)]),
                             lat=np.tile(lats, [len(times)]),
                             time=np.repeat(times, len(lons)))

kernel = AdvectionRK4  # + pset.Kernel(Ageing)

new_write = True

timer.particlefile = timer.Timer('ParticleFile', parent=timer.root)
outfile = './' + __file__[:-3]
pfile = ParticleFile(outfile, pset)
if new_write:
    pfile.write_pickle_per_tstep(pset, pset[0].time)
else:
    pfile.write(pset, pset[0].time)
def test_rectilinear_s_grid_sampling(mode, z4d):
    lon_g0 = np.linspace(-3e4, 3e4, 61, dtype=np.float32)
    lat_g0 = np.linspace(0, 1000, 2, dtype=np.float32)
    time_g0 = np.linspace(0, 1000, 2, dtype=np.float64)
    if z4d:
        depth_g0 = np.zeros((time_g0.size, 5, lat_g0.size, lon_g0.size), dtype=np.float32)
    else:
        depth_g0 = np.zeros((5, lat_g0.size, lon_g0.size), dtype=np.float32)

    def bath_func(lon):
        bath = (lon <= -2e4) * 20.
        bath += (lon > -2e4) * (lon < 2e4) * (110. + 90 * np.sin(lon / 2e4 * np.pi / 2.))
        bath += (lon >= 2e4) * 200.
        return bath

    bath = bath_func(lon_g0)

    zdim = depth_g0.shape[-3]
    for i in range(depth_g0.shape[-1]):
        for k in range(zdim):
            if z4d:
                depth_g0[:, k, :, i] = bath[i] * k / (zdim - 1)
            else:
                depth_g0[k, :, i] = bath[i] * k / (zdim - 1)

    grid = RectilinearSGrid(lon_g0, lat_g0, depth=depth_g0, time=time_g0)

    u_data = np.zeros((grid.tdim, grid.zdim, grid.ydim, grid.xdim), dtype=np.float32)
    v_data = np.zeros((grid.tdim, grid.zdim, grid.ydim, grid.xdim), dtype=np.float32)
    temp_data = np.zeros((grid.tdim, grid.zdim, grid.ydim, grid.xdim), dtype=np.float32)
    for k in range(1, zdim):
        temp_data[:, k, :, :] = k / (zdim - 1.)
    u_field = Field('U', u_data, grid=grid)
    v_field = Field('V', v_data, grid=grid)
    temp_field = Field('temp', temp_data, grid=grid)

    other_fields = {}
    other_fields['temp'] = temp_field
    field_set = FieldSet(u_field, v_field, fields=other_fields)

    def sampleTemp(particle, fieldset, time, dt):
        particle.temp = fieldset.temp[time, particle.lon, particle.lat, particle.depth]

    class MyParticle(ptype[mode]):
        temp = Variable('temp', dtype=np.float32, initial=20.)

    lon = 400
    lat = 0
    ratio = .3
    pset = ParticleSet.from_list(field_set, MyParticle, lon=[lon], lat=[lat],
                                 depth=[bath_func(lon) * ratio])
    pset.execute(pset.Kernel(sampleTemp), runtime=0, dt=0)
    assert np.allclose(pset.particles[0].temp, ratio, atol=1e-4)
timer.fieldset = timer.Timer('FieldSet', parent=timer.root)
field_set = get_cmems_fieldset(2013)
timer.fieldset.stop()

kernel = AdvectionRK4

time0 = field_set.U.grid.time[0]
lonv = np.arange(-6, 10.1, .2)
latv = np.arange(50, 63, .2)
lon, lat = np.meshgrid(lonv, latv)
lon = lon.flatten()
lat = lat.flatten()
time = time0 * np.ones(lon.shape)

pset = ParticleSet.from_list(field_set, JITParticle, lon=lon, lat=lat, time=time)
kernel = AdvectionRK4

timer.particlefile = timer.Timer('ParticleFile', parent=timer.root)
outfile = __file__[:-3]
pfile = ParticleFile(outfile, pset)
pfile.write(pset, pset[0].time)
timer.particlefile.stop()

tic = timelib.time()
ndays = 60
timer.run = timer.Timer('Execution', parent=timer.root, start=False)
for d in range(ndays):
    print('running %d / %d: time %g' % (d + 1, ndays, timelib.time() - tic))
    timer.run.start()
    pset.execute(