def test_moving_eddies_fwdbwd(mode, mesh, npart=2):
    method = AdvectionRK4
    fieldset = moving_eddies_fieldset(mesh=mesh)

    # Define the initial particle positions, depending on the mesh type
    lons = [3.3, 3.3] if fieldset.U.grid.mesh == 'spherical' else [3.3e5, 3.3e5]
    lats = [46., 47.8] if fieldset.U.grid.mesh == 'spherical' else [1e5, 2.8e5]
    pset = ParticleSet(fieldset=fieldset,
                       pclass=ptype[mode],
                       lon=lons,
                       lat=lats)

    # Execute for 1 day, with 5-minute timesteps and hourly output
    runtime = delta(days=1)
    dt = delta(minutes=5)
    outputdt = delta(hours=1)
    print("MovingEddies: Advecting %d particles for %s" %
          (npart, str(runtime)))
    pset.execute(method,
                 runtime=runtime,
                 dt=dt,
                 output_file=pset.ParticleFile(name="EddyParticlefwd",
                                               outputdt=outputdt))

    print("Now running in backward time mode")
    pset.execute(method,
                 endtime=0,
                 dt=-dt,
                 output_file=pset.ParticleFile(name="EddyParticlebwd",
                                               outputdt=outputdt))

    assert np.allclose([p.lon for p in pset], lons)
    assert np.allclose([p.lat for p in pset], lats)
    return pset
Example #2
def create_outputfiles(dir):
    datafile = path.join(path.dirname(__file__), 'test_data', 'testfields')

    fieldset = FieldSet.from_parcels(datafile, allow_time_extrapolation=True)
    pset = ParticleSet(fieldset=fieldset, lon=[], lat=[], pclass=JITParticle)
    npart = 10
    delaytime = delta(hours=1)
    endtime = delta(hours=24)
    x = 3. * (1. / 1.852 / 60)  # 3 km offset from boundary
    y = (fieldset.U.lat[0] + x, fieldset.U.lat[-1] - x)  # latitude range, including offsets
    lat = np.linspace(y[0], y[1], npart)

    fp = dir.join("DelayParticle.nc")
    output_file = pset.ParticleFile(name=fp, outputdt=delaytime)

    for t in range(npart):
        pset.add(JITParticle(lon=x, lat=lat[t], fieldset=fieldset))
        pset.execute(AdvectionRK4,
                     runtime=delaytime,
                     dt=delta(minutes=5),
                     output_file=output_file)

    pset.execute(AdvectionRK4,
                 runtime=endtime - npart * delaytime,
                 dt=delta(minutes=5),
                 output_file=output_file)

    return fp
Example #3
def test_pset_repeated_release_delayed_adding_deleting(type, fieldset, mode, repeatdt, tmpdir, dt, maxvar, runtime=10):
    fieldset.maxvar = maxvar

    class MyParticle(ptype[mode]):
        sample_var = Variable('sample_var', initial=0.)
    if type == 'repeatdt':
        pset = ParticleSet(fieldset, lon=[0], lat=[0], pclass=MyParticle, repeatdt=repeatdt)
    elif type == 'timearr':
        pset = ParticleSet(fieldset, lon=np.zeros(runtime), lat=np.zeros(runtime), pclass=MyParticle, time=list(range(runtime)))
    outfilepath = tmpdir.join("pfile_repeated_release")
    pfile = pset.ParticleFile(outfilepath, outputdt=abs(dt))

    def IncrLon(particle, fieldset, time):
        particle.sample_var += 1.
        if particle.sample_var > fieldset.maxvar:
            particle.delete()
    for i in range(runtime):
        pset.execute(IncrLon, dt=dt, runtime=1., output_file=pfile)
    ncfile = Dataset(outfilepath+".nc", 'r', 'NETCDF4')
    samplevar = ncfile.variables['sample_var'][:]
    ncfile.close()
    if type == 'repeatdt':
        assert samplevar.shape == (runtime // repeatdt+1, min(maxvar+1, runtime)+1)
        assert np.allclose([p.sample_var for p in pset], np.arange(maxvar, -1, -repeatdt))
    elif type == 'timearr':
        assert samplevar.shape == (runtime, min(maxvar + 1, runtime) + 1)
    # test whether samplevar[:, k] = k
    for k in range(samplevar.shape[1]):
        assert np.allclose([p for p in samplevar[:, k] if np.isfinite(p)], k)
    filesize = os.path.getsize(str(outfilepath+".nc"))
    assert filesize < 1024 * 65  # test that chunking leads to filesize less than 65KB
Example #4
def test_variable_written_once(fieldset, mode, tmpdir, npart):
    filepath = tmpdir.join("pfile_once_written_variables")

    def Update_v(particle, fieldset, time, dt):
        particle.v_once += 1.
        particle.age += dt

    class MyParticle(ptype[mode]):
        v_once = Variable('v_once',
                          dtype=np.float32,
                          initial=0.,
                          to_write='once')
        age = Variable('age', dtype=np.float32, initial=0.)

    lon = np.linspace(0, 1, npart, dtype=np.float32)
    lat = np.linspace(1, 0, npart, dtype=np.float32)
    pset = ParticleSet(fieldset,
                       pclass=MyParticle,
                       lon=lon,
                       lat=lat,
                       repeatdt=0.1)
    pset.execute(pset.Kernel(Update_v),
                 endtime=1,
                 dt=0.1,
                 output_file=pset.ParticleFile(name=filepath, outputdt=0.1))
    assert np.allclose([p.v_once - p.age * 10 for p in pset], 0, atol=1e-5)
    ncfile = Dataset(filepath + ".nc", 'r', 'NETCDF4')
    vfile = ncfile.variables['v_once'][:]
    assert (vfile.shape == (npart * 11, ))
    assert np.all([v == 0 for v in vfile])
Example #5
def test_pset_create_fromparticlefile(fieldset, mode, restart, tmpdir):
    filename = tmpdir.join("pset_fromparticlefile.nc")
    lon = np.linspace(0, 1, 10, dtype=np.float32)
    lat = np.linspace(1, 0, 10, dtype=np.float32)
    pset = ParticleSet(fieldset, lon=lon, lat=lat, pclass=ptype[mode])
    pfile = pset.ParticleFile(filename, outputdt=1)

    def DeleteLast(particle, fieldset, time):
        if particle.lon == 1.:
            particle.delete()

    pset.execute(DeleteLast, runtime=2, dt=1, output_file=pfile)
    pfile.close()

    if restart:
        ptype[mode].setLastID(0)  # need to reset to zero
    pset_new = ParticleSet.from_particlefile(fieldset,
                                             pclass=ptype[mode],
                                             filename=filename,
                                             restart=restart)

    for var in ['lon', 'lat', 'depth', 'time']:
        assert np.allclose([getattr(p, var) for p in pset],
                           [getattr(p, var) for p in pset_new])

    if restart:
        assert np.allclose([p.id for p in pset], [p.id for p in pset_new])
Example #6
def create_outputfiles(dir):
    datafile = path.join(path.dirname(__file__), 'test_data', 'testfields')

    fieldset = FieldSet.from_nemo(datafile, allow_time_extrapolation=True)
    pset = ParticleSet(fieldset=fieldset, lon=[], lat=[], pclass=JITParticle)
    npart = 10
    delaytime = delta(hours=1)
    endtime = delta(hours=24)
    x = 3. * (1. / 1.852 / 60)  # 3 km offset from boundary
    y = (fieldset.U.lat[0] + x, fieldset.U.lat[-1] - x)  # latitude range, including offsets
    lat = np.linspace(y[0], y[1], npart, dtype=np.float32)

    fp_index = dir.join("DelayParticle")
    output_file = pset.ParticleFile(name=fp_index, type="indexed")

    for t in range(npart):
        pset.add(JITParticle(lon=x, lat=lat[t], fieldset=fieldset))
        pset.execute(AdvectionRK4,
                     runtime=delaytime,
                     dt=delta(minutes=5),
                     interval=delaytime,
                     output_file=output_file)

    pset.execute(AdvectionRK4,
                 runtime=endtime - npart * delaytime,
                 dt=delta(minutes=5),
                 interval=delta(hours=1),
                 output_file=output_file)

    fp_array = dir.join("DelayParticle_array")
    convert_IndexedOutputToArray(fp_index + '.nc', fp_array + '.nc')
    return fp_index, fp_array
Example #7
def run_hycom_subset_monthly_release(input_dir,
                                     output_dir,
                                     output_name,
                                     time0,
                                     lon0,
                                     lat0,
                                     start_date,
                                     end_date,
                                     kh=10.,
                                     interp_method='linear',
                                     indices=_get_io_indices_from_netcdf()):
    # get paths
    ncfiles = get_daily_ncfiles_in_time_range(input_dir, start_date, end_date)
    output_path = output_dir + output_name
    # create fieldset
    filenames = [input_dir + ncfile for ncfile in ncfiles]
    variables = {'U': 'u', 'V': 'v'}
    dimensions = {'lat': 'lat', 'lon': 'lon', 'time': 'time'}
    fset = FieldSet.from_netcdf(filenames,
                                variables,
                                dimensions,
                                indices=indices)
    # add constant horizontal diffusivity (zero on land)
    lm = LandMask.read_from_netcdf()
    kh2D = kh * np.ones(lm.mask.shape)
    kh2D[lm.mask.astype('bool')] = 0.0  # diffusion zero on land
    kh2D_subset = kh2D[indices['lat'], :][:, indices['lon']]
    fset.add_field(
        Field('Kh_zonal',
              data=kh2D_subset,
              lon=fset.U.grid.lon,
              lat=fset.U.grid.lat,
              mesh='spherical',
              interp_method=interp_method))
    fset.add_field(
        Field('Kh_meridional',
              data=kh2D_subset,
              lon=fset.U.grid.lon,
              lat=fset.U.grid.lat,
              mesh='spherical',
              interp_method=interp_method))
    # monthly release
    pset = ParticleSet(fieldset=fset,
                       pclass=JITParticle,
                       lon=lon0,
                       lat=lat0,
                       time=time0)
    # execute
    run_time = timedelta(days=(end_date - start_date).days)
    dt = timedelta(hours=1)
    output_interval = 24
    kernel = pset.Kernel(AdvectionRK4) + pset.Kernel(DiffusionUniformKh)
    output_file = pset.ParticleFile(name=output_path,
                                    outputdt=dt * output_interval)
    pset.execute(kernel,
                 runtime=run_time,
                 dt=dt,
                 output_file=output_file,
                 verbose_progress=True,
                 recovery={ErrorCode.ErrorOutOfBounds: delete_particle})
Example #8
def test_variable_written_once(fieldset, mode, tmpdir, npart):
    filepath = tmpdir.join("pfile_once_written_variables")

    def Update_v(particle, fieldset, time, dt):
        particle.v_once += 1.

    class MyParticle(ptype[mode]):
        v_once = Variable('v_once',
                          dtype=np.float32,
                          initial=1.,
                          to_write='once')

    lon = np.linspace(0, 1, npart, dtype=np.float32)
    lat = np.linspace(1, 0, npart, dtype=np.float32)
    pset = ParticleSet(fieldset, pclass=MyParticle, lon=lon, lat=lat)
    pset.execute(pset.Kernel(Update_v),
                 starttime=0.,
                 endtime=1,
                 dt=0.1,
                 interval=0.2,
                 output_file=pset.ParticleFile(name=filepath))
    ncfile = Dataset(filepath + ".nc", 'r', 'NETCDF4')
    V_once = ncfile.variables['v_once'][:]
    assert np.all([p.v_once == 11.0 for p in pset])
    assert (V_once.shape == (npart, ))
    assert (V_once[0] == 1.)
Example #9
def test_variable_written_once(fieldset, mode, tmpdir, npart):
    filepath = tmpdir.join("pfile_once_written_variables.nc")

    def Update_v(particle, fieldset, time):
        particle.v_once += 1.
        particle.age += particle.dt

    class MyParticle(ptype[mode]):
        v_once = Variable('v_once',
                          dtype=np.float64,
                          initial=0.,
                          to_write='once')
        age = Variable('age', dtype=np.float32, initial=0.)

    lon = np.linspace(0, 1, npart)
    lat = np.linspace(1, 0, npart)
    time = np.arange(0, npart / 10., 0.1, dtype=np.float64)
    pset = ParticleSet(fieldset,
                       pclass=MyParticle,
                       lon=lon,
                       lat=lat,
                       time=time,
                       v_once=time)
    ofile = pset.ParticleFile(name=filepath, outputdt=0.1)
    pset.execute(pset.Kernel(Update_v), endtime=1, dt=0.1, output_file=ofile)

    assert np.allclose(
        [p.v_once - vo - p.age * 10 for p, vo in zip(pset, time)],
        0,
        atol=1e-5)
    ncfile = close_and_compare_netcdffiles(filepath, ofile)
    vfile = np.ma.filled(ncfile.variables['v_once'][:], np.nan)
    assert (vfile.shape == (npart, ))
    assert np.allclose(vfile, time)
    ncfile.close()
Example #10
def test_variable_written_ondelete(fieldset, mode, tmpdir, npart=3):
    filepath = tmpdir.join("pfile_on_delete_written_variables")

    def move_west(particle, fieldset, time, dt):
        tmp = fieldset.U[time, particle.lon, particle.lat,
                         particle.depth]  # to trigger out-of-bounds error
        particle.lon -= 0.1 + tmp

    def DeleteP(particle, fieldset, time, dt):
        particle.delete()

    lon = np.linspace(0.05, 0.95, npart, dtype=np.float32)
    lat = np.linspace(0.95, 0.05, npart, dtype=np.float32)

    (dt, runtime) = (0.1, 0.8)
    lon_end = lon - runtime / dt * 0.1
    noutside = len(lon_end[lon_end < 0])

    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=lon, lat=lat)

    outfile = pset.ParticleFile(name=filepath, write_ondelete=True)
    pset.execute(move_west,
                 runtime=runtime,
                 dt=dt,
                 output_file=outfile,
                 recovery={ErrorCode.ErrorOutOfBounds: DeleteP})
    ncfile = Dataset(filepath + ".nc", 'r', 'NETCDF4')
    lon = ncfile.variables['lon'][:]
    assert (lon.size == noutside)
Example #11
def test_pset_repeated_release_delayed_adding(fieldset,
                                              mode,
                                              repeatdt,
                                              tmpdir,
                                              dt,
                                              npart=10):
    class MyParticle(ptype[mode]):
        sample_var = Variable('sample_var', initial=0.)

    pset = ParticleSet(fieldset,
                       lon=[0],
                       lat=[0],
                       pclass=MyParticle,
                       repeatdt=repeatdt)
    outfilepath = tmpdir.join("pfile_repeatdt")
    pfile = pset.ParticleFile(outfilepath, outputdt=abs(dt))

    def IncrLon(particle, fieldset, time, dt):
        particle.sample_var += 1.

    for i in range(npart):
        assert len(pset) == (i // repeatdt) + 1
        pset.execute(IncrLon, dt=dt, runtime=1., output_file=pfile)
    assert np.allclose([p.sample_var for p in pset],
                       np.arange(npart, -1, -repeatdt))
    ncfile = Dataset(outfilepath + ".nc", 'r', 'NETCDF4')
    samplevar = ncfile.variables['sample_var'][:]
    assert samplevar.shape == (len(pset), npart + 1)
Example #12
class virtualfleet:
    """
    USAGE:
    lat,lon,depth,time : numpy arrays describing initial set of floats
    vfield : velocityfield object
    """
    def __init__(self, **kwargs):
        #props
        self.lat = kwargs['lat']
        self.lon = kwargs['lon']
        self.depth = kwargs['depth']
        self.time = kwargs['time']
        vfield = kwargs['vfield']
        #define parcels particleset
        self.pset = ParticleSet(fieldset=vfield.fieldset,
                                pclass=ArgoParticle,
                                lon=self.lon,
                                lat=self.lat,
                                depth=self.depth,
                                time=self.time)
        if vfield.isglobal:
            # combine Argo vertical movement kernel with Advection kernel + boundaries
            self.kernels = ArgoVerticalMovement + self.pset.Kernel(
                AdvectionRK4) + self.pset.Kernel(periodicBC)
        else:
            self.kernels = ArgoVerticalMovement + self.pset.Kernel(
                AdvectionRK4)

    def plotfloat(self):
        # using the parcels pset built-in show function for now
        self.pset.show()

    def simulate(self, **kwargs):
        """
        USAGE:
        duration : number of days (365)
        dt_run : time step in hours for the computation (1/12)        
        dt_out : time step in hours for writing the output (24)
        output_file
        """
        duration = kwargs['duration']
        dt_run = kwargs['dt_run']
        dt_out = kwargs['dt_out']
        output_file = kwargs['output_file']
        self.run_params = {
            'duration': duration,
            'dt_run': dt_run,
            'dt_out': dt_out,
            'output_file': output_file
        }

        # Now execute the kernels for X days, saving data every Y minutes
        self.pset.execute(
            self.kernels,
            runtime=timedelta(days=duration),
            dt=timedelta(hours=dt_run),
            output_file=self.pset.ParticleFile(
                name=output_file, outputdt=timedelta(hours=dt_out)),
            recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle})
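
A minimal usage sketch for the virtualfleet class above, assuming a velocityfield object built elsewhere; the arrays, dates and output file name below are placeholders, not part of the original code.

import numpy as np
from datetime import datetime

vfield = ...  # assumed: a velocityfield object exposing .fieldset and .isglobal
fleet = virtualfleet(lat=np.array([35.0, 36.0]),
                     lon=np.array([-40.0, -41.0]),
                     depth=np.array([1000.0, 1000.0]),
                     time=np.array([datetime(2019, 1, 1)] * 2),
                     vfield=vfield)
fleet.simulate(duration=365,    # days
               dt_run=1. / 12,  # hours per computation step
               dt_out=24,       # hours between output records
               output_file='argo_floats.nc')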
Example #13
def test_pfile_array_remove_particles(fieldset, mode, tmpdir, npart=10):
    filepath = tmpdir.join("pfile_array_remove_particles")
    pset = ParticleSet(fieldset, pclass=ptype[mode],
                       lon=np.linspace(0, 1, npart, dtype=np.float32),
                       lat=0.5*np.ones(npart, dtype=np.float32))
    pfile = pset.ParticleFile(filepath)
    pfile.write(pset, 0)
    pset.remove(3)
    pfile.write(pset, 1)
Example #14
def p_advect(ptype=JITParticle,
             outname='noname',
             pos=0,
             y=2001,
             m=1,
             d=1,
             simdays=90):
    """
    Main function for execution
        - outname: name of the output file. Note that all important parameters are also in the file name.
        - pos: Execution is manually parallelized over different initial position grids. These are indexed.
        - y, m, d: year, month an day of the simulation start
        - simdays: number of days to simulate
        - particledepth: for fixed-depth simulations. Index of nemo depth grid
    """

    print('-------------------------')
    print('Start run... Parameters: ')
    print('-------------------------')
    print('Initial time (y, m, d): ', (y, m, d))
    print('Simulation days', simdays)
    print('-------------------------')

    #Load initial particle positions (grids) from external files
    lons = np.load(griddir + 'Lons' + str(pos) + '.npy')
    lats = np.load(griddir + 'Lats' + str(pos) + '.npy')

    depths = [1.5] * len(lons)
    times = [datetime(y, m, d)] * len(lons)
    print('Number of particles: ', len(lons))

    fieldset = get_nemo()

    outfile = outputdir + outname + '3D_y' + str(y) + '_m' + str(
        m) + '_d' + str(d) + '_simdays' + str(simdays) + '_pos' + str(pos)

    fieldset.U.vmax = 10
    fieldset.V.vmax = 10
    fieldset.W.vmax = 10

    pset = ParticleSet(fieldset=fieldset,
                       pclass=ptype,
                       lon=lons,
                       lat=lats,
                       time=times,
                       depth=depths)

    kernels = pset.Kernel(AdvectionRK4_3D) + pset.Kernel(periodicBC)

    #Trajectory computation
    pset.execute(kernels,
                 runtime=timedelta(days=simdays),
                 dt=timedelta(minutes=10),
                 output_file=pset.ParticleFile(name=outfile,
                                               outputdt=timedelta(days=30)),
                 recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle},
                 verbose_progress=False)
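
A hypothetical invocation of p_advect as described in its docstring; griddir, outputdir, get_nemo and the kernels it uses are assumed to be defined elsewhere in the module, and the output name is a placeholder.

if __name__ == '__main__':
    # position grid 0, start date 1 January 2001, 90-day run (the function defaults)
    p_advect(ptype=JITParticle, outname='nemo_run', pos=0, y=2001, m=1, d=1, simdays=90)
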
def test_error_duplicate_outputdir(fieldset, tmpdir):
    outfilepath = tmpdir.join("error_duplicate_outputdir.nc")
    pset1 = ParticleSet(fieldset, pclass=JITParticle, lat=0, lon=0)
    pset2 = ParticleSet(fieldset, pclass=JITParticle, lat=0, lon=0)

    py_random.seed(1234)
    pfile1 = pset1.ParticleFile(name=outfilepath,
                                outputdt=1.,
                                convert_at_end=False)

    py_random.seed(1234)
    error_thrown = False
    try:
        pset2.ParticleFile(name=outfilepath, outputdt=1., convert_at_end=False)
    except IOError:
        error_thrown = True
    assert error_thrown

    pfile1.delete_tempwritedir()
def decaying_moving_example(fieldset, mode='scipy', method=AdvectionRK4):
    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=start_lon, lat=start_lat)

    endtime = delta(days=2)
    dt = delta(minutes=5)
    interval = delta(hours=1)

    pset.execute(method, endtime=endtime, dt=dt, interval=interval,
                 output_file=pset.ParticleFile(name="DecayingMovingParticle"), show_movie=False)

    return pset
Example #17
def decaying_moving_example(fieldset, outfile, mode='scipy', method=AdvectionRK4):
    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=start_lon, lat=start_lat)

    dt = delta(minutes=5)
    runtime = delta(days=2)
    outputdt = delta(hours=1)

    pset.execute(method, runtime=runtime, dt=dt, moviedt=None,
                 output_file=pset.ParticleFile(name=outfile, outputdt=outputdt))

    return pset
Example #18
def test_delay_start_example(mode, npart=10, show_movie=False):
    """Example script that shows how to 'delay' the start of particle advection.
    This is useful for example when particles need to be started at different times

    In this example, we use pset.add statements to add one particle every hour
    in the peninsula fieldset. Note that the title in the movie may not show correct time"""

    fieldset = FieldSet.from_nemo(path.join(path.dirname(__file__),
                                            'Peninsula_data', 'peninsula'),
                                  extra_fields={'P': 'P'},
                                  allow_time_extrapolation=True)

    # Initialise particles as in the Peninsula example
    x = 3. * (1. / 1.852 / 60)  # 3 km offset from boundary
    y = (fieldset.U.lat[0] + x, fieldset.U.lat[-1] - x)  # latitude range, including offsets

    lat = np.linspace(y[0], y[1], npart, dtype=np.float32)
    pset = ParticleSet(fieldset, lon=[], lat=[], pclass=ptype[mode])

    delaytime = delta(hours=1)  # delay time between particle releases

    # Since we are going to add particles during runtime, we need "indexed" NetCDF file
    output_file = pset.ParticleFile(name="DelayParticle", type="indexed")

    for t in range(npart):
        pset.add(ptype[mode](lon=x, lat=lat[t], fieldset=fieldset))
        pset.execute(AdvectionRK4,
                     runtime=delaytime,
                     dt=delta(minutes=5),
                     interval=delta(hours=1),
                     show_movie=show_movie,
                     starttime=delaytime * t,
                     output_file=output_file)

    # Note that time on the movie is not parsed correctly
    pset.execute(AdvectionRK4,
                 runtime=delta(hours=24) - npart * delaytime,
                 starttime=delaytime * npart,
                 dt=delta(minutes=5),
                 interval=delta(hours=1),
                 show_movie=show_movie,
                 output_file=output_file)

    londist = np.array([(p.lon - x) for p in pset])
    assert (londist > 0.1).all()

    # Test whether time was written away correctly in file
    pfile = Dataset("DelayParticle.nc", 'r')
    id = pfile.variables['trajectory'][:]
    time = pfile.variables['time'][id == id[0]]
    assert all(time[1:] - time[0:-1] == time[1] - time[0])
    pfile.close()
def test_variable_write_double(fieldset, mode, tmpdir):
    filepath = tmpdir.join("pfile_variable_write_double")

    def Update_lon(particle, fieldset, time):
        particle.lon += 0.1

    pset = ParticleSet(fieldset, pclass=JITParticle, lon=[0], lat=[0], lonlatdepth_dtype=np.float64)
    pset.execute(pset.Kernel(Update_lon), endtime=1, dt=0.1,
                 output_file=pset.ParticleFile(name=filepath, outputdt=0.1))
    ncfile = Dataset(filepath+".nc", 'r', 'NETCDF4')
    lons = ncfile.variables['lon'][:]
    assert (isinstance(lons[0, 0], np.float64))
def test_pfile_array_remove_all_particles(fieldset, mode, tmpdir, npart=10):

    filepath = tmpdir.join("pfile_array_remove_particles")
    pset = ParticleSet(fieldset, pclass=ptype[mode],
                       lon=np.linspace(0, 1, npart),
                       lat=0.5*np.ones(npart))
    pfile = pset.ParticleFile(filepath)
    pfile.write(pset, 0)
    for _ in range(npart):
        pset.remove(-1)
    pfile.write(pset, 1)
    pfile.write(pset, 2)
Example #21
def test_pfile_array_remove_particles(fieldset, mode, tmpdir, npart=10):
    filepath = tmpdir.join("pfile_array_remove_particles.nc")
    pset = ParticleSet(fieldset,
                       pclass=ptype[mode],
                       lon=np.linspace(0, 1, npart),
                       lat=0.5 * np.ones(npart))
    pfile = pset.ParticleFile(filepath)
    pfile.write(pset, 0)
    pset.remove(3)
    pfile.write(pset, 1)
    ncfile = close_and_compare_netcdffiles(filepath, pfile)
    ncfile.close()
Example #22
def test_uniform_analytical(mode, u, v, w, direction, tmpdir):
    lon = np.arange(0, 15, dtype=np.float32)
    lat = np.arange(0, 15, dtype=np.float32)
    if w is not None:
        depth = np.arange(0, 40, 2, dtype=np.float32)
        U = u * np.ones((depth.size, lat.size, lon.size), dtype=np.float32)
        V = v * np.ones((depth.size, lat.size, lon.size), dtype=np.float32)
        W = w * np.ones((depth.size, lat.size, lon.size), dtype=np.float32)
        fieldset = FieldSet.from_data({
            'U': U,
            'V': V,
            'W': W
        }, {
            'lon': lon,
            'lat': lat,
            'depth': depth
        },
                                      mesh='flat')
        fieldset.W.interp_method = 'cgrid_velocity'
    else:
        U = u * np.ones((lat.size, lon.size), dtype=np.float32)
        V = v * np.ones((lat.size, lon.size), dtype=np.float32)
        fieldset = FieldSet.from_data({
            'U': U,
            'V': V
        }, {
            'lon': lon,
            'lat': lat
        },
                                      mesh='flat')
    fieldset.U.interp_method = 'cgrid_velocity'
    fieldset.V.interp_method = 'cgrid_velocity'

    x0, y0, z0 = 6.1, 6.2, 20
    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=x0, lat=y0, depth=z0)

    outfile = tmpdir.join("uniformanalytical.nc")
    pset.execute(AdvectionAnalytical,
                 runtime=4,
                 dt=direction,
                 output_file=pset.ParticleFile(name=outfile, outputdt=1))
    assert np.abs(pset.lon - x0 - 4 * u * direction) < 1e-6
    assert np.abs(pset.lat - y0 - 4 * v * direction) < 1e-6
    if w:
        assert np.abs(pset.depth - z0 - 4 * w * direction) < 1e-4

    time = Dataset(outfile, 'r', 'NETCDF4').variables['time'][:]
    assert np.allclose(time, direction * np.arange(0, 5))
    lons = Dataset(outfile, 'r', 'NETCDF4').variables['lon'][:]
    assert np.allclose(lons, x0 + direction * u * np.arange(0, 5))
Example #23
def run_dampedoscillation(fieldset, start_lon, start_lat, outfilename):
    pset = ParticleSet(fieldset,
                       pclass=JITParticle,
                       lon=start_lon,
                       lat=start_lat)

    outfile = pset.ParticleFile(name=outfilename, outputdt=delta(hours=1))
    pset.execute(AdvectionRK4,
                 runtime=delta(days=4),
                 dt=delta(minutes=5),
                 output_file=outfile)

    x, y = true_values(pset[0].time, start_lon, start_lat)
    print((x - pset[0].lon) / 1000, (y - pset[0].lat) / 1000)
Example #24
def test_write_timebackward(fieldset, mode, tmpdir):
    outfilepath = tmpdir.join("pfile_write_timebackward.nc")

    def Update_lon(particle, fieldset, time):
        particle.lon -= 0.1 * particle.dt

    pset = ParticleSet(fieldset,
                       pclass=JITParticle,
                       lat=np.linspace(0, 1, 3),
                       lon=[0, 0, 0],
                       time=[1, 2, 3])
    pfile = pset.ParticleFile(name=outfilepath, outputdt=1.)
    pset.execute(pset.Kernel(Update_lon), runtime=4, dt=-1., output_file=pfile)
    ncfile = close_and_compare_netcdffiles(outfilepath, pfile)
    trajs = ncfile.variables['trajectory'][:, 0]
    assert np.all(np.diff(trajs) > 0)  # all particles written in order of traj ID
Example #25
def test_globcurrent_pset_fromfile(mode, dt, pid_offset, tmpdir):
    filename = tmpdir.join("pset_fromparticlefile.nc")
    fieldset = set_globcurrent_fieldset()

    ptype[mode].setLastID(pid_offset)
    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=25, lat=-35)
    pfile = pset.ParticleFile(filename, outputdt=delta(hours=6))
    pset.execute(AdvectionRK4, runtime=delta(days=1), dt=dt, output_file=pfile)
    pfile.close()

    restarttime = np.nanmax if dt > 0 else np.nanmin
    pset_new = ParticleSet.from_particlefile(fieldset, pclass=ptype[mode], filename=filename, restarttime=restarttime)
    pset.execute(AdvectionRK4, runtime=delta(days=1), dt=dt)
    pset_new.execute(AdvectionRK4, runtime=delta(days=1), dt=dt)

    for var in ['lon', 'lat', 'depth', 'time', 'id']:
        assert np.allclose([getattr(p, var) for p in pset], [getattr(p, var) for p in pset_new])
Example #26
def test_variable_write_double(fieldset, mode, tmpdir):
    filepath = tmpdir.join("pfile_variable_write_double.nc")

    def Update_lon(particle, fieldset, time):
        particle.lon += 0.1

    pset = ParticleSet(fieldset,
                       pclass=ptype[mode],
                       lon=[0],
                       lat=[0],
                       lonlatdepth_dtype=np.float64)
    ofile = pset.ParticleFile(name=filepath, outputdt=0.1)
    pset.execute(pset.Kernel(Update_lon), endtime=1, dt=0.1, output_file=ofile)

    ncfile = close_and_compare_netcdffiles(filepath, ofile)
    lons = ncfile.variables['lon'][:]
    assert (isinstance(lons[0, 0], np.float64))
    ncfile.close()
def test_reset_dt(fieldset, mode, tmpdir):
    # Assert that p.dt gets reset when a write_time is not a multiple of dt.
    # For p.dt=0.02 to reach outputdt=0.05 and endtime=0.1, the steps should be
    # [0.02, 0.02, 0.01, 0.02, 0.02, 0.01], resulting in 6 kernel executions
    # (see the arithmetic sketch after this test).
    filepath = tmpdir.join("pfile_reset_dt.nc")

    def Update_lon(particle, fieldset, time):
        particle.lon += 0.1

    pset = ParticleSet(fieldset,
                       pclass=ptype[mode],
                       lon=[0],
                       lat=[0],
                       lonlatdepth_dtype=np.float64)
    ofile = pset.ParticleFile(name=filepath, outputdt=0.05)
    pset.execute(pset.Kernel(Update_lon),
                 endtime=0.1,
                 dt=0.02,
                 output_file=ofile)

    assert np.allclose(pset.lon, .6)
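
The step sequence asserted in the comment above can be reproduced with a small, parcels-independent sketch of the clipping arithmetic (an illustration of the expected behaviour, not parcels' actual scheduler; clipped_steps is a hypothetical helper):

def clipped_steps(dt=0.02, outputdt=0.05, endtime=0.1):
    """Step from 0 to endtime in increments of dt, clipping each step so that
    every multiple of outputdt (and endtime itself) is hit exactly."""
    t, steps = 0.0, []
    while t < endtime - 1e-12:
        next_stop = min(endtime, (int(t / outputdt + 1e-9) + 1) * outputdt)
        step = min(dt, next_stop - t)
        steps.append(round(step, 10))
        t += step
    return steps

# clipped_steps() -> [0.02, 0.02, 0.01, 0.02, 0.02, 0.01]: 6 kernel calls, so lon ends at 6 * 0.1 = 0.6
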
def test_pset_create_fromparticlefile(fieldset, mode, restart, tmpdir):
    filename = tmpdir.join("pset_fromparticlefile.nc")
    lon = np.linspace(0, 1, 10, dtype=np.float32)
    lat = np.linspace(1, 0, 10, dtype=np.float32)

    class TestParticle(ptype[mode]):
        p = Variable('p', np.float32, initial=0.33)
        p2 = Variable('p2', np.float32, initial=1, to_write=False)
        p3 = Variable('p3', np.float32, to_write='once')

    pset = ParticleSet(fieldset,
                       lon=lon,
                       lat=lat,
                       depth=[4] * len(lon),
                       pclass=TestParticle,
                       p3=np.arange(len(lon)))
    pfile = pset.ParticleFile(filename, outputdt=1)

    def Kernel(particle, fieldset, time):
        particle.p = 2.
        if particle.lon == 1.:
            particle.delete()

    pset.execute(Kernel, runtime=2, dt=1, output_file=pfile)
    pfile.close()

    pset_new = ParticleSet.from_particlefile(fieldset,
                                             pclass=TestParticle,
                                             filename=filename,
                                             restart=restart,
                                             repeatdt=1)

    for var in ['lon', 'lat', 'depth', 'time', 'p', 'p2', 'p3']:
        assert np.allclose([getattr(p, var) for p in pset],
                           [getattr(p, var) for p in pset_new])

    if restart:
        assert np.allclose([p.id for p in pset], [p.id for p in pset_new])
    pset_new.execute(Kernel, runtime=2, dt=1)
    assert len(pset_new) == 3 * len(pset)
Example #29
def test_variable_written_ondelete(fieldset,
                                   mode,
                                   tmpdir,
                                   assystemcall,
                                   npart=3):
    filepath = tmpdir.join("pfile_on_delete_written_variables.nc")

    def move_west(particle, fieldset, time):
        tmp = fieldset.U[time, particle.depth, particle.lat,
                         particle.lon]  # to trigger out-of-bounds error
        particle.lon -= 0.1 + tmp

    def DeleteP(particle, fieldset, time):
        particle.delete()

    lon = np.linspace(0.05, 0.95, npart)
    lat = np.linspace(0.95, 0.05, npart)

    (dt, runtime) = (0.1, 0.8)
    lon_end = lon - runtime / dt * 0.1
    noutside = len(lon_end[lon_end < 0])

    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=lon, lat=lat)

    outfile = pset.ParticleFile(name=filepath, write_ondelete=True)
    outfile.add_metadata('runtime', runtime)
    pset.execute(move_west,
                 runtime=runtime,
                 dt=dt,
                 output_file=outfile,
                 recovery={ErrorCode.ErrorOutOfBounds: DeleteP})

    ncfile = close_and_compare_netcdffiles(filepath,
                                           outfile,
                                           assystemcall=assystemcall)
    assert ncfile.runtime == runtime
    lon = ncfile.variables['lon'][:]
    assert (lon.size == noutside)
    ncfile.close()
Example #30
def test_pset_repeated_release_delayed_adding_deleting(fieldset,
                                                       mode,
                                                       repeatdt,
                                                       tmpdir,
                                                       dt,
                                                       maxvar,
                                                       runtime=10):
    fieldset.maxvar = maxvar

    class MyParticle(ptype[mode]):
        sample_var = Variable('sample_var', initial=0.)

    pset = ParticleSet(fieldset,
                       lon=[0],
                       lat=[0],
                       pclass=MyParticle,
                       repeatdt=repeatdt)
    outfilepath = tmpdir.join("pfile_repeatdt")
    pfile = pset.ParticleFile(outfilepath, outputdt=abs(dt))

    def IncrLon(particle, fieldset, time, dt):
        particle.sample_var += 1.
        if particle.sample_var > fieldset.maxvar:
            particle.delete()

    for i in range(runtime):
        pset.execute(IncrLon, dt=dt, runtime=1., output_file=pfile)
    assert np.allclose([p.sample_var for p in pset],
                       np.arange(maxvar, -1, -repeatdt))
    ncfile = Dataset(outfilepath + ".nc", 'r', 'NETCDF4')
    samplevar = ncfile.variables['sample_var'][:]
    assert samplevar.shape == (runtime // repeatdt + 1,
                               min(maxvar + 1, runtime) + 1)
    if repeatdt == 0:
        # test whether samplevar[i, i+k] = k for k=range(maxvar)
        for k in range(maxvar):
            for i in range(runtime - k):
                assert (samplevar[i, i + k] == k)