Example #1
def store_state_and_check_file(self, aliases):
    assert not os.path.isfile(self.ncfile)
    monitor = NetCDFMonitor(self.ncfile,
                            aliases=aliases,
                            write_on_store=True)
    monitor.store(state)
    assert os.path.isfile(self.ncfile)
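The aliases argument maps state quantity names to the variable names written into the netCDF file, so a caller of this helper would pass a mapping along these lines (a hypothetical illustration, not part of the test suite):

aliases = {'air_temperature': 'T', 'air_pressure': 'p'}  # hypothetical short names for the output file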
Example #2
def test_netcdf_monitor_multiple_times_batched_all_vars():
    time_list = [
        datetime(2013, 7, 20, 0),
        datetime(2013, 7, 20, 6),
        datetime(2013, 7, 20, 12),
    ]
    current_state = state.copy()
    try:
        assert not os.path.isfile('out.nc')
        monitor = NetCDFMonitor('out.nc')
        for time in time_list:
            current_state['time'] = time
            monitor.store(current_state)
            assert not os.path.isfile('out.nc')  # not set to write on store
        monitor.write()
        assert os.path.isfile('out.nc')
        with xr.open_dataset('out.nc') as ds:
            assert len(ds.data_vars.keys()) == 2
            assert 'air_temperature' in ds.data_vars.keys()
            assert ds.data_vars['air_temperature'].attrs['units'] == 'degK'
            assert tuple(
                ds.data_vars['air_temperature'].shape) == (len(time_list), nx,
                                                           ny, nz)
            assert 'air_pressure' in ds.data_vars.keys()
            assert ds.data_vars['air_pressure'].attrs['units'] == 'Pa'
            assert tuple(
                ds.data_vars['air_pressure'].shape) == (len(time_list), nx, ny,
                                                        nz)
            assert len(ds['time']) == len(time_list)
            assert np.all(ds['time'].values ==
                          [np.datetime64(time) for time in time_list])
    finally:  # make sure we remove the output file
        if os.path.isfile('out.nc'):
            os.remove('out.nc')
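These tests rely on a module-level state dictionary and grid sizes nx, ny, nz defined elsewhere in the test file. A minimal sketch of such a fixture, with assumed dimensions, dimension names and values, could look like this:

import numpy as np
from datetime import datetime
from sympl import DataArray

nx, ny, nz = 4, 4, 3  # assumed grid dimensions
state = {
    'time': datetime(2013, 7, 20),
    'air_temperature': DataArray(
        np.full((nx, ny, nz), 273.),
        dims=['lon', 'lat', 'mid_levels'],
        attrs={'units': 'degK'}),
    'air_pressure': DataArray(
        np.full((nx, ny, nz), 1e5),
        dims=['lon', 'lat', 'mid_levels'],
        attrs={'units': 'Pa'}),
}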
Example #3
def test_netcdf_monitor_single_write_on_store():
    try:
        assert not os.path.isfile('out.nc')
        monitor = NetCDFMonitor('out.nc', write_on_store=True)
        monitor.store(state)
        assert os.path.isfile('out.nc')
        with xr.open_dataset('out.nc') as ds:
            assert len(ds.data_vars.keys()) == 2
            assert 'air_temperature' in ds.data_vars.keys()
            assert ds.data_vars['air_temperature'].attrs['units'] == 'degK'
            assert tuple(ds.data_vars['air_temperature'].shape) == (1, nx, ny,
                                                                    nz)
            assert 'air_pressure' in ds.data_vars.keys()
            assert ds.data_vars['air_pressure'].attrs['units'] == 'Pa'
            assert tuple(ds.data_vars['air_pressure'].shape) == (1, nx, ny, nz)
            assert len(ds['time']) == 1
            assert ds['time'][0] == np.datetime64(state['time'])
    finally:  # make sure we remove the output file
        if os.path.isfile('out.nc'):
            os.remove('out.nc')
Example #4
def test_netcdf_monitor_raises_when_names_change_on_batch_write():
    current_state = state.copy()
    try:
        assert not os.path.isfile('out.nc')
        monitor = NetCDFMonitor('out.nc')
        current_state['time'] = datetime(2013, 7, 20, 0)
        monitor.store(current_state)
        assert not os.path.isfile('out.nc')
        current_state['time'] = datetime(2013, 7, 20, 6)
        current_state['air_density'] = current_state['air_pressure']
        monitor.store(current_state)
        try:
            monitor.write()
        except InvalidStateError:
            pass
        except Exception as err:
            raise err
        else:
            raise AssertionError(
                'Expected InvalidStateError but was not raised.')
    finally:  # make sure we remove the output file
        if os.path.isfile('out.nc'):
            os.remove('out.nc')
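If the suite runs under pytest, the try/except/else block above can be written more compactly with pytest.raises; an equivalent sketch (assuming pytest is available):

import pytest

with pytest.raises(InvalidStateError):
    monitor.write()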
Example #5
# Set initial/boundary conditions
latitudes = my_state['latitude'].values
longitudes = my_state['longitude'].values

zenith_angle = np.radians(latitudes)
surface_shape = [len(longitudes), len(latitudes)]

my_state['zenith_angle'].values = zenith_angle
my_state['eastward_wind'].values[:] = np.random.randn(
    *my_state['eastward_wind'].shape)
my_state['ocean_mixed_layer_thickness'].values[:] = 50

surf_temp_profile = 290 - (40 * np.sin(zenith_angle)**2)
my_state['surface_temperature'].values = surf_temp_profile

for i in range(1500 * 24 * 6):
    diag, my_state = dycore(my_state, model_time_step)
    my_state.update(diag)
    my_state['time'] += model_time_step

    if i % (6 * 24) == 0:
        netcdf_monitor.store(my_state)
        monitor.store(my_state)
        print('max. zonal wind: ', np.amax(my_state['eastward_wind'].values))
        print('max. humidity: ', np.amax(my_state['specific_humidity'].values))
        print('max. surf temp: ',
              my_state['surface_temperature'].max(keep_attrs=True).values)

    print(my_state['time'])
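The loop above assumes a dycore, a model_time_step and two monitors created earlier in the script. A plausible setup for the monitors (the output filename and plotting callback are illustrative, not from the original script) would be:

from sympl import NetCDFMonitor, PlotFunctionMonitor

netcdf_monitor = NetCDFMonitor('gcm_output.nc', write_on_store=True)  # hypothetical output file
monitor = PlotFunctionMonitor(plot_function)  # plot_function: user-supplied plotting callback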
Example #6
def main():
    # ============ Adjustable Variables ============
    # Integration Options
    dt = timedelta(minutes=15)  # timestep
    duration = '48_00:00'       # run duration ('<days>_<hours>:<mins>')
    linearized = True
    ncout_freq = 6              # netcdf write frequency (hours)
    plot_freq = 6               # plot Monitor call frequency (hours)
    ntrunc = 42                 # triangular truncation for spharm (e.g., 21 --> T21)

    # Diffusion Options
    diff_on = True              # Use diffusion?
    k = 2.338e16                # Diffusion coefficient for del^4 hyperdiffusion

    # Forcing Options
    forcing_on = True           # Apply vort. tendency forcing?
    damp_ts = 14.7              # Damping timescale (in days)

    # I/O Options
    ncoutfile = os.path.join(os.path.dirname(__file__), 'sardeshmukh88.nc')
    append_nc = False           # Append to an existing netCDF file?
    # ==============================================

    start = time()

    # Get the initial state
    state = super_rotation(linearized=linearized, ntrunc=ntrunc)

    # Set up the Timestepper with the desired Prognostics
    if linearized:
        dynamics_prog = LinearizedDynamics(ntrunc=ntrunc)
        diffusion_prog = LinearizedDiffusion(k=k, ntrunc=ntrunc)
        damping_prog = LinearizedDamping(tau=damp_ts)
    else:
        dynamics_prog = NonlinearDynamics(ntrunc=ntrunc)
        diffusion_prog = NonlinearDiffusion(k=k, ntrunc=ntrunc)
        damping_prog = NonlinearDamping(tau=damp_ts)
    prognostics = [TendencyInDiagnosticsWrapper(dynamics_prog, 'dynamics')]
    if diff_on:
        prognostics.append(TendencyInDiagnosticsWrapper(diffusion_prog, 'diffusion'))
    if forcing_on:
        # Get our suptropical RWS forcing (from equatorial divergence)
        rws, rlat, rlon = rws_from_tropical_divergence(state)
        prognostics.append(TendencyInDiagnosticsWrapper(Forcing.from_numpy_array(rws, rlat, rlon, ntrunc=ntrunc,
                                                                                 linearized=linearized), 'forcing'))
        prognostics.append(TendencyInDiagnosticsWrapper(damping_prog, 'damping'))
    stepper = Leapfrog(prognostics)

    # Create Monitors for plotting & storing data
    plt_monitor = PlotFunctionMonitor(debug_plots.fourpanel)
    if os.path.isfile(ncoutfile) and not append_nc:
        os.remove(ncoutfile)

    aliases = get_component_aliases(*prognostics)
    nc_monitor = NetCDFMonitor(ncoutfile, write_on_store=True, aliases=aliases)

    # Figure out the end date of this run
    d, h, m = re.split('[_:]', duration)
    end_date = state['time'] + timedelta(days=int(d), hours=int(h), minutes=int(m))

    # Begin the integration loop
    idate = state['time']
    while state['time'] <= end_date:
        # Get the state at the next timestep using our Timestepper
        diagnostics, next_state = stepper(state, dt)

        # Add any calculated diagnostics to our current state
        state.update(diagnostics)

        # Write state to netCDF every <ncout_freq> hours
        fhour = (state['time'] - idate).days*24 + (state['time'] - idate).seconds/3600
        if fhour % ncout_freq == 0:
            print(state['time'])
            nc_monitor.store(state)

        # Make plot(s) every <plot_freq> hours
        if fhour % plot_freq == 0:
            plt_monitor.store(state)

        # Advance the state to the next timestep
        next_state['time'] = state['time'] + dt
        state = next_state

    print('TOTAL INTEGRATION TIME: {:.02f} min\n'.format((time()-start)/60.))
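The forecast-hour bookkeeping in the loop can also be computed with timedelta.total_seconds(), which avoids splitting the interval into days and seconds; an equivalent sketch:

fhour = (state['time'] - idate).total_seconds() / 3600  # same as days*24 + seconds/3600 for non-negative intervals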
Example #7
# Figure out the end date of this run
d, h, m = re.split('[_:]', duration)
end_date = state['time'] + timedelta(days=int(d), hours=int(h), minutes=int(m))

# Begin the integration loop
idate = state['time']
while state['time'] <= end_date:

    # Get the state at the next timestep using our Timestepper
    diagnostics, next_state = stepper(state, dt)

    # Add any calculated diagnostics to our current state
    state.update(diagnostics)

    # Write state to netCDF every <ncout_freq> hours
    fhour = (state['time'] - idate).days * 24 + (state['time'] -
                                                 idate).seconds / 3600
    if fhour % ncout_freq == 0:
        print(state['time'])
        nc_monitor.store(state)

    # Make plot(s) every <plot_freq> hours
    if fhour % plot_freq == 0:
        plt_monitor.store(state)

    # Advance the state to the next timestep
    next_state['time'] = state['time'] + dt
    state = next_state

print('TOTAL INTEGRATION TIME: {:.02f} min\n'.format((time() - start) / 60.))
Example #8
    [simple_physics, convection, radiation_lw, radiation_sw, slab])

state['air_temperature'].values[:] = 270
state['surface_albedo_for_direct_shortwave'].values[:] = 0.5
state['surface_albedo_for_direct_near_infrared'].values[:] = 0.5
state['surface_albedo_for_diffuse_shortwave'].values[:] = 0.5

state['zenith_angle'].values[:] = np.pi / 2.5
state['surface_temperature'].values[:] = 300.
state['ocean_mixed_layer_thickness'].values[:] = 5
state['area_type'].values[:] = 'sea'

time_stepper = AdamsBashforth([convection, radiation_lw, radiation_sw, slab])

for i in range(20000):
    convection.current_time_step = timestep
    diagnostics, state = time_stepper(state, timestep)
    state.update(diagnostics)
    diagnostics, new_state = simple_physics(state, timestep)
    state.update(diagnostics)
    if (i + 1) % 20 == 0:
        monitor.store(state)
        netcdf_monitor.store(state)
        print(i, state['surface_temperature'].values)
        print(state['surface_upward_sensible_heat_flux'])
        print(state['surface_upward_latent_heat_flux'])

    state.update(new_state)
    state['time'] += timestep
    state['eastward_wind'].values[:] = 3.
Example #9
class MyModel():
    """Climate model class."""
    def __init__(self,
                 dt_seconds=1800,
                 nx=64,
                 ny=32,
                 nz=10,
                 state=None,
                 input_fields_to_store=input_vars,
                 output_fields_to_store=output_vars,
                 input_save_fn=None,
                 output_save_fn=None,
                 save_interval=6,
                 convection=None,
                 extra_components=[]):
        """
        Initialize model. Uses SSTs from Andersen and Kuang 2012.
        Creates initial state unless state is given.
        """
        climt.set_constants_from_dict(
            {'stellar_irradiance': {
                'value': 200,
                'units': 'W m^-2'
            }})

        self.model_time_step = timedelta(seconds=dt_seconds)
        self.step_counter = 0
        self.save_interval = save_interval

        # Create components
        if convection is None:
            convection = climt.EmanuelConvection(
                tendencies_in_diagnostics=True)
        simple_physics = TimeDifferencingWrapper(
            climt.SimplePhysics(tendencies_in_diagnostics=True))

        radiation = climt.GrayLongwaveRadiation(tendencies_in_diagnostics=True)

        components = [simple_physics, radiation, convection] + extra_components

        self.dycore = climt.GFSDynamicalCore(components,
                                             number_of_damped_levels=2)
        grid = climt.get_grid(nx=nx, ny=ny, nz=nz)

        if state is None:
            self.create_initial_state(grid)
        else:
            self.state = state

        if input_save_fn is not None:
            self.input_netcdf_monitor = NetCDFMonitor(
                input_save_fn,
                write_on_store=True,
                store_names=input_fields_to_store)
        if output_save_fn is not None:
            self.output_netcdf_monitor = NetCDFMonitor(
                output_save_fn,
                write_on_store=True,
                store_names=output_fields_to_store)

    def create_initial_state(self, grid):
        """Create initial state."""
        # Create model state
        self.state = climt.get_default_state([self.dycore], grid_state=grid)

        # Set initial/boundary conditions
        latitudes = self.state['latitude'].values
        sst_k = and_kua_sst(latitudes)

        self.state['surface_temperature'] = DataArray(sst_k,
                                                      dims=['lat', 'lon'],
                                                      attrs={'units': 'degK'})
        self.state['eastward_wind'].values[:] = np.random.randn(
            *self.state['eastward_wind'].shape)

    def step(self):
        """Take one time step forward."""
        self.diag, self.state = self.dycore(self.state, self.model_time_step)
        self.state.update(self.diag)
        self.state['time'] += self.model_time_step
        if hasattr(self, 'input_netcdf_monitor'):
            if self.step_counter % self.save_interval == 0:
                self.input_netcdf_monitor.store(self.state)
        if hasattr(self, 'output_netcdf_monitor'):
            if (self.step_counter - 1) % self.save_interval == 0:
                self.output_netcdf_monitor.store(self.state)
        self.step_counter += 1

    def iterate(self, steps, noprog=False):
        """Iterate over several time steps."""
        for i in tqdm(range(steps), disable=noprog):
            self.step()
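A hypothetical way to drive this class (the file names and step count are illustrative only):

if __name__ == '__main__':
    model = MyModel(input_save_fn='inputs.nc', output_save_fn='outputs.nc')
    model.iterate(48)  # 48 steps of 1800 s, i.e. one model day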