Example #1
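This test round-trips a ParticleSet through a NetCDF particle file: the set is written out with ParticleFile, rebuilt with ParticleSet.from_particlefile, and the restored positions and times (and, when restart is set, the particle IDs) are checked against the originals.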
def test_pset_create_fromparticlefile(fieldset, mode, restart, tmpdir):
    filename = tmpdir.join("pset_fromparticlefile.nc")
    lon = np.linspace(0, 1, 10, dtype=np.float32)
    lat = np.linspace(1, 0, 10, dtype=np.float32)
    pset = ParticleSet(fieldset, lon=lon, lat=lat, pclass=ptype[mode])
    pfile = pset.ParticleFile(filename, outputdt=1)

    def DeleteLast(particle, fieldset, time):
        if particle.lon == 1.:
            particle.delete()

    pset.execute(DeleteLast, runtime=2, dt=1, output_file=pfile)
    pfile.close()

    if restart:
        ptype[mode].setLastID(0)  # need to reset to zero
    pset_new = ParticleSet.from_particlefile(fieldset,
                                             pclass=ptype[mode],
                                             filename=filename,
                                             restart=restart)

    for var in ['lon', 'lat', 'depth', 'time']:
        assert np.allclose([getattr(p, var) for p in pset],
                           [getattr(p, var) for p in pset_new])

    if restart:
        assert np.allclose([p.id for p in pset], [p.id for p in pset_new])
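The test above (like the others in this listing) relies on module-level scaffolding that the excerpt omits. Below is a minimal sketch of what that scaffolding might look like: the ptype mapping follows the Parcels test-suite convention, while this fieldset fixture is a hypothetical stand-in (a zero-velocity field suffices, since the kernel here never samples velocities).

import numpy as np
import pytest
from parcels import FieldSet, ParticleSet, ScipyParticle, JITParticle

# Map the parametrized 'mode' string onto a particle class (Parcels test convention).
ptype = {'scipy': ScipyParticle, 'jit': JITParticle}

@pytest.fixture
def fieldset():
    # Hypothetical fixture: a tiny zero-velocity field on a 2x2 flat mesh.
    data = {'U': np.zeros((2, 2), dtype=np.float32),
            'V': np.zeros((2, 2), dtype=np.float32)}
    dimensions = {'lon': np.array([0., 1.], dtype=np.float32),
                  'lat': np.array([0., 1.], dtype=np.float32)}
    return FieldSet.from_data(data, dimensions, mesh='flat')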
Example #2
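The same round trip against the GlobCurrent example fieldset: one set is advected for a day while being written to file, a second set is restarted from that file, and after advecting both for another day their trajectories must coincide.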
def test_globcurrent_pset_fromfile(mode, dt, pid_offset, tmpdir):
    filename = tmpdir.join("pset_fromparticlefile.nc")
    fieldset = set_globcurrent_fieldset()

    ptype[mode].setLastID(pid_offset)
    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=25, lat=-35)
    pfile = pset.ParticleFile(filename, outputdt=delta(hours=6))
    pset.execute(AdvectionRK4, runtime=delta(days=1), dt=dt, output_file=pfile)
    pfile.close()

    restarttime = np.nanmax if dt > 0 else np.nanmin
    pset_new = ParticleSet.from_particlefile(fieldset, pclass=ptype[mode], filename=filename, restarttime=restarttime)
    pset.execute(AdvectionRK4, runtime=delta(days=1), dt=dt)
    pset_new.execute(AdvectionRK4, runtime=delta(days=1), dt=dt)

    for var in ['lon', 'lat', 'depth', 'time', 'id']:
        assert np.allclose([getattr(p, var) for p in pset], [getattr(p, var) for p in pset_new])
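Note the restarttime argument: it accepts a callable that is applied to the time values in the file, so np.nanmax picks the latest written time (the natural restart point when integrating forward) and np.nanmin the earliest (for a backward run with dt < 0).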
Example #3
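A variant with user-defined particle Variables, verifying that custom fields (including ones written only once, or not at all) also survive the file round trip, and that a restored set can keep releasing particles via repeatdt.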
def test_pset_create_fromparticlefile(fieldset, mode, restart, tmpdir):
    filename = tmpdir.join("pset_fromparticlefile.nc")
    lon = np.linspace(0, 1, 10, dtype=np.float32)
    lat = np.linspace(1, 0, 10, dtype=np.float32)

    class TestParticle(ptype[mode]):
        p = Variable('p', np.float32, initial=0.33)
        p2 = Variable('p2', np.float32, initial=1, to_write=False)
        p3 = Variable('p3', np.float32, to_write='once')

    pset = ParticleSet(fieldset,
                       lon=lon,
                       lat=lat,
                       depth=[4] * len(lon),
                       pclass=TestParticle,
                       p3=np.arange(len(lon)))
    pfile = pset.ParticleFile(filename, outputdt=1)

    def Kernel(particle, fieldset, time):
        particle.p = 2.
        if particle.lon == 1.:
            particle.delete()

    pset.execute(Kernel, runtime=2, dt=1, output_file=pfile)
    pfile.close()

    pset_new = ParticleSet.from_particlefile(fieldset,
                                             pclass=TestParticle,
                                             filename=filename,
                                             restart=restart,
                                             repeatdt=1)

    for var in ['lon', 'lat', 'depth', 'time', 'p', 'p2', 'p3']:
        assert np.allclose([getattr(p, var) for p in pset],
                           [getattr(p, var) for p in pset_new])

    if restart:
        assert np.allclose([p.id for p in pset], [p.id for p in pset_new])
    pset_new.execute(Kernel, runtime=2, dt=1)
    assert len(pset_new) == 3 * len(pset)
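Three details are worth spelling out: p2 has to_write=False, so it never reaches the file and from_particlefile falls back to its initial value; p3 is written only once, at release; and because pset_new is created with repeatdt=1, its particles are re-released every time unit, which is why executing it for runtime=2 is expected to triple its size in the final assert.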
Example #4
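Not a test but a full simulation driver from a marine-litter model: it assembles a FieldSet from NetCDF current (and optionally wind) files, adds diffusion and unbeaching fields, and either seeds a fresh ParticleSet from release-location files or resumes a previous run with ParticleSet.from_particlefile.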
def sequential(start_date,
               end_date,
               config,
               name='',
               winds=True,
               diffusion=True,
               unbeaching=True,
               restart_file=""):
    # years = config[WorldLitter.years]
    years = np.arange(start_date.year, end_date.year + 1)
    base_folder = config[GlobalModel.base_folder]
    release_loc_folder = config[GlobalModel.loc_folder]
    output_file = join(config[GlobalModel.output_folder], name)
    unbeach_file = config[GlobalModel.unbeach_file]
    lat_files = config[GlobalModel.lat_files]
    lon_files = config[GlobalModel.lon_files]
    dt = config[GlobalModel.dt]
    kh = 1
    repeat_release = config[GlobalModel.repeat_release]

    run_time = timedelta(seconds=(end_date - start_date).total_seconds())

    file_names = read_files(base_folder,
                            years,
                            wind=winds,
                            start_date=start_date,
                            end_date=end_date)
    if len(file_names) == 0:
        raise Exception("ERROR: We couldn't read any file!")

    print("Reading initial positions.....")
    lat0 = functools.reduce(lambda a, b: np.concatenate((a, b), axis=0), [
        np.genfromtxt(join(release_loc_folder, x), delimiter='')
        for x in lat_files
    ])
    lon0 = functools.reduce(lambda a, b: np.concatenate((a, b), axis=0), [
        np.genfromtxt(join(release_loc_folder, x), delimiter='')
        for x in lon_files
    ])

    variables = {'U': 'surf_u', 'V': 'surf_v'}

    dimensions = {'lat': 'latitude', 'lon': 'longitude', 'time': 'time'}

    print("Reading netcdf files.....", flush=True)
    # Adding the vector fields; they may be currents only, or currents + winds
    main_fieldset = FieldSet.from_netcdf(file_names,
                                         variables,
                                         dimensions,
                                         allow_time_extrapolation=True,
                                         field_chunksize=(2048, 2048))
    # ------- Making synthetic diffusion coefficient
    U_grid = main_fieldset.U.grid
    lat = U_grid.lat
    lon = U_grid.lon
    # Getting proportional size by degree
    if diffusion:
        print("Adding diffusion .....")
        add_Kh(main_fieldset, lat, lon, kh)
    if unbeaching:
        print("Adding unbeaching.....")
        add_unbeaching_field(main_fieldset, lat, lon, unbeach_file)

    # -------  Adding constants for periodic halo
    main_fieldset.add_constant('halo_west', main_fieldset.U.grid.lon[0])
    main_fieldset.add_constant('halo_east', main_fieldset.U.grid.lon[-1])
    main_fieldset.add_periodic_halo(zonal=True)  # create a zonal halo

    print("Setting up everything.....")
    if unbeaching:
        particle_class = LitterParticle
    else:
        particle_class = JITParticle

    if restart_file != '':
        print(F"Using restart file {restart_file}")
        pset = ParticleSet.from_particlefile(fieldset=main_fieldset,
                                             pclass=particle_class,
                                             filename=restart_file,
                                             repeatdt=repeat_release)
    else:
        pset = ParticleSet(fieldset=main_fieldset,
                           pclass=particle_class,
                           lon=lon0,
                           lat=lat0,
                           repeatdt=repeat_release)

    out_parc_file = pset.ParticleFile(name=output_file,
                                      outputdt=config[GlobalModel.output_freq])
    t = time.time()

    print(F"Adding kernels...")
    if unbeaching:
        kernels = pset.Kernel(AdvectionRK4Beached)
    else:
        kernels = pset.Kernel(AdvectionRK4)

    if unbeaching:
        kernels += pset.Kernel(BeachTesting_2D)
        kernels += pset.Kernel(UnBeaching)
        if diffusion:
            kernels += pset.Kernel(BrownianMotion2DUnbeaching)
            kernels += pset.Kernel(BeachTesting_2D)
    else:
        if diffusion:
            kernels += pset.Kernel(BrownianMotion2D)

    kernels += pset.Kernel(periodicBC)

    print(F"Running with {pset.size} number of particles for {run_time}",
          flush=True)
    pset.execute(kernels,
                 runtime=run_time,
                 dt=dt,
                 output_file=out_parc_file,
                 recovery={ErrorCode.ErrorOutOfBounds: outOfBounds})

    print(F"Done time={time.time()-t}.....")

    print(F"Saving output to {output_file}!!!!!")
    # domain = {'N': 31, 'S': 16, 'E': -76, 'W': -98}
    # pset.show(field=main_fieldset.U, domain=domain)  # Draw current particles
    out_parc_file.export()  # Save trajectories to file

    if MPI:
        print(
            F"----- Waiting for file to be saved proc {MPI.COMM_WORLD.Get_rank()} ... ---------",
            flush=True)
        MPI.COMM_WORLD.Barrier()

    # out_parc_file.close()
    # del pset
    # del kernels
    # del main_fieldset
    # # plotTrajectoriesFile(output_file) # Plotting trajectories
    # print("Forcing gc.collect")
    # gc.collect()

    print("Done!!!!!!!!!!!! YEAH BABE!!!!!!!!", flush=True)