Example #1
def read_plot_ts_slice (file_path, grid, lon0=None, lat0=None, time_index=None, t_start=None, t_end=None, time_average=False, hmin=None, hmax=None, zmin=None, zmax=None, tmin=None, tmax=None, smin=None, smax=None, date_string=None, fig_name=None, second_file_path=None):

    # Make sure we'll end up with a single record in time
    if time_index is None and not time_average:
        print('Error (read_plot_ts_slice): either specify time_index or set time_average=True.')
        sys.exit()

    if date_string is None and time_index is not None:
        # Determine what to write about the date
        date_string = parse_date(file_path=file_path, time_index=time_index)

    if not isinstance(grid, Grid):
        # This is the path to the NetCDF grid file, not a Grid object
        # Make a grid object from it
        grid = Grid(grid)

    # Read temperature
    if second_file_path is not None:
        file_path_use = find_variable(file_path, second_file_path, 'THETA')
    else:
        file_path_use = file_path        
    temp = mask_3d(read_netcdf(file_path_use, 'THETA', time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average), grid)
    # Read salinity
    if second_file_path is not None:
        file_path_use = find_variable(file_path, second_file_path, 'SALT')
    else:
        file_path_use = file_path        
    salt = mask_3d(read_netcdf(file_path_use, 'SALT', time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average), grid)

    # Plot
    ts_slice_plot(temp, salt, grid, lon0=lon0, lat0=lat0, hmin=hmin, hmax=hmax, zmin=zmin, zmax=zmax, tmin=tmin, tmax=tmax, smin=smin, smax=smax, date_string=date_string, fig_name=fig_name)
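
A minimal usage sketch for the function above, assuming read_plot_ts_slice and the Grid class can be imported from the surrounding toolbox (the module names and file paths below are hypothetical):

# Hypothetical imports and paths; adjust to wherever these live in your copy of the toolbox.
from plot_slices import read_plot_ts_slice
from grid import Grid

grid = Grid('grid.nc')  # build the Grid object once and reuse it
# Either pick a single time index ...
read_plot_ts_slice('output.nc', grid, lon0=-40, time_index=0, fig_name='ts_slice.png')
# ... or average over all records; omitting both triggers the error branch above.
read_plot_ts_slice('output.nc', grid, lon0=-40, time_average=True)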
Example #2
def gl_final(file_path, fig_name=None):

    xGL = read_netcdf(file_path, 'xGL')
    yGL = read_netcdf(file_path, 'yGL')

    fig, ax = gl_frame(xGL, yGL, -1, label='Final', move_box=True)
    finished_plot(fig, fig_name)
Example #3
def fix_eraint_humidity (in_dir, out_dir, prec=32):

    in_dir = real_dir(in_dir)
    out_dir = real_dir(out_dir)

    # File paths
    in_head = in_dir + 'era_a_'
    in_tail = '_075.nc'
    out_head = out_dir + 'ERAinterim_spfh2m_'
    start_year = 1979
    end_year = 2017

    for year in range(start_year, end_year+1):
        in_file = in_head + str(year) + in_tail
        print('Reading ' + in_file)
        # Need temperature, pressure, and dew point
        temp = read_netcdf(in_file, 't2m')
        press = read_netcdf(in_file, 'msl')
        dewpoint = read_netcdf(in_file, 'd2m')
        # Calculate vapour pressure
        e = es0*np.exp(Lv/Rv*(1/temp - 1/dewpoint))
        # Calculate specific humidity
        spf = sh_coeff*e/(press - (1-sh_coeff)*e)
        # Now flip in latitude to match Matlab-generated files
        spf = spf[:,::-1,:]
        out_file = out_head + str(year)
        write_binary(spf, out_file, prec=prec)
        if year == end_year:
            # Copy the last timestep as in era_dummy_year
            spf_last = spf[-1,:]
            out_file = out_head + str(year+1)
            write_binary(spf_last, out_file, prec=prec)
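
The constants es0, Lv, Rv and sh_coeff are defined elsewhere in the module. As a rough guide only, here is a sketch with the kind of values these names usually denote (assumed typical constants, not the module's actual definitions), wrapping the same conversion in a standalone function:

import numpy as np

es0 = 611.0         # assumed reference saturation vapour pressure (Pa)
Lv = 2.5e6          # assumed latent heat of vapourisation (J/kg)
Rv = 461.5          # assumed gas constant for water vapour (J/kg/K)
sh_coeff = 0.62198  # assumed ratio of dry-air to water-vapour gas constants

def specific_humidity(temp, press, dewpoint):
    # Same conversion as in fix_eraint_humidity above:
    # vapour pressure from the dew point, then specific humidity
    e = es0*np.exp(Lv/Rv*(1/temp - 1/dewpoint))
    return sh_coeff*e/(press - (1-sh_coeff)*e)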
Example #4
 def __init__(self, model_path, expt, ensemble_member, max_lon=180):
     # Get path to one file on the tracer grid
     cmip_file = find_cmip6_files(model_path, expt, ensemble_member,
                                  'thetao', 'Omon')[0][0]
     self.lon_2d = fix_lon_range(read_netcdf(cmip_file, 'longitude'),
                                 max_lon=max_lon)
     self.lat_2d = read_netcdf(cmip_file, 'latitude')
     self.z = -1 * read_netcdf(cmip_file, 'lev')
     self.mask = read_netcdf(cmip_file, 'thetao', time_index=0).mask
     # And one on the u-grid
     cmip_file_u = find_cmip6_files(model_path, expt, ensemble_member, 'uo',
                                    'Omon')[0][0]
     self.lon_u_2d = fix_lon_range(read_netcdf(cmip_file_u, 'longitude'),
                                   max_lon=max_lon)
     self.lat_u_2d = read_netcdf(cmip_file_u, 'latitude')
     self.mask_u = read_netcdf(cmip_file_u, 'uo', time_index=0).mask
     # And one on the v-grid
     cmip_file_v = find_cmip6_files(model_path, expt, ensemble_member, 'vo',
                                    'Omon')[0][0]
     self.lon_v_2d = fix_lon_range(read_netcdf(cmip_file_v, 'longitude'),
                                   max_lon=max_lon)
     self.lat_v_2d = read_netcdf(cmip_file_v, 'latitude')
     self.mask_v = read_netcdf(cmip_file_v, 'vo', time_index=0).mask
     # Save grid dimensions too
     self.nx = self.lon_2d.shape[1]
     self.ny = self.lat_2d.shape[0]
     self.nz = self.z.size
Example #5
def write_topo_files (nc_grid, bathy_file, draft_file, prec=64):

    bathy = read_netcdf(nc_grid, 'bathy')
    draft = read_netcdf(nc_grid, 'draft')
    write_binary(bathy, bathy_file, prec=prec)
    write_binary(draft, draft_file, prec=prec)
    print('Files written successfully. Now go try them out! Make sure you update all the necessary variables in data, data.shelfice, SIZE.h, job scripts, etc.')
Example #6
def timeseries_watermass_volume (file_path, grid, tmin=None, tmax=None, smin=None, smax=None, time_index=None, t_start=None, t_end=None, time_average=False):

    # Read T and S
    temp = read_netcdf(file_path, 'THETA', time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average)
    salt = read_netcdf(file_path, 'SALT', time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average)
    if len(temp.shape)==3:
        # Just one timestep; add a dummy time dimension
        temp = np.expand_dims(temp,0)
        salt = np.expand_dims(salt,0)
    # Set any unset bounds
    if tmin is None:
        tmin = -9999
    if tmax is None:
        tmax = 9999
    if smin is None:
        smin = -9999
    if smax is None:
        smax = 9999
    # Build the timeseries
    timeseries = []
    for t in range(temp.shape[0]):
        # Find points within these bounds
        index = (temp[t,:] >= tmin)*(temp[t,:] <= tmax)*(salt[t,:] >= smin)*(salt[t,:] <= smax)*(grid.hfac > 0)
        # Integrate volume of those cells, and get percent of total volume
        timeseries.append(np.sum(grid.dV[index])/np.sum(grid.dV)*100)
    return np.array(timeseries)
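
For example, the percentage of domain volume filled by water colder than -1.9 degC and saltier than 34.4 psu (bounds chosen purely for illustration) could be computed like this, with hypothetical file paths:

grid = Grid('grid.nc')
percent = timeseries_watermass_volume('output.nc', grid, tmax=-1.9, smin=34.4)
# 'percent' holds one value per time record: the percentage of total ocean volume within those T/S bounds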
Example #7
 def read_and_mask (var_name, file_path, check_diff_time=False, gtype='t'):
     if var_name in ['tminustf', 'rho']:
         # Need to read 2 variables
         temp = read_and_mask('THETA', file_path, check_diff_time=check_diff_time)
         salt = read_and_mask('SALT', file_path, check_diff_time=check_diff_time)
         if var_name == 'rho':
             return mask_3d(density(eosType, salt, temp, ref_depth, rhoConst=rhoConst, Tref=Tref, Sref=Sref, tAlpha=tAlpha, sBeta=sBeta), grid)
         elif var_name == 'tminustf':
             return t_minus_tf(temp, salt, grid)
     elif var_name in ['vnorm', 'valong']:
         u = read_and_mask('UVEL', file_path, check_diff_time=check_diff_time, gtype='u')
         v = read_and_mask('VVEL', file_path, check_diff_time=check_diff_time, gtype='v')
         if var_name == 'vnorm':
             return normal_vector(u, v, grid, point0, point1)
         elif var_name == 'valong':
             return parallel_vector(u, v, grid, point0, point1)
     elif var_name == 'tadv_along':
         tadv_x = read_and_mask('ADVx_TH', file_path, check_diff_time=check_diff_time)
         tadv_y = read_and_mask('ADVy_TH', file_path, check_diff_time=check_diff_time)
         return parallel_vector(tadv_x, tadv_y, grid, point0, point1)
     elif var_name == 'tdif_along':
         tdif_x = read_and_mask('DFxE_TH', file_path, check_diff_time=check_diff_time)
         tdif_y = read_and_mask('DFyE_TH', file_path, check_diff_time=check_diff_time)
         return parallel_vector(tdif_x, tdif_y, grid, point0, point1)
     else:
         if check_diff_time and diff_time:
             return mask_3d(read_netcdf(file_path, var_name, time_index=time_index_2, t_start=t_start_2, t_end=t_end_2, time_average=time_average), grid, gtype=gtype)
         else:
             return mask_3d(read_netcdf(file_path, var_name, time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average), grid, gtype=gtype)
Example #8
def gl_final(file_path, fig_name=None, dpi=None):

    xGL = read_netcdf(file_path, 'xGL')
    yGL = read_netcdf(file_path, 'yGL')

    fig, ax = gl_frame(xGL, yGL, -1, label='Final')
    finished_plot(fig, fig_name, dpi=dpi)
Example #9
def read_and_trim_diff(file_1, file_2, var_name):

    time_1 = netcdf_time(file_1, monthly=False)
    time_2 = netcdf_time(file_2, monthly=False)
    data_1 = read_netcdf(file_1, var_name)
    data_2 = read_netcdf(file_2, var_name)
    time, data_diff = trim_and_diff(time_1, time_2, data_1, data_2)
    return time, data_diff
Example #10
def check_read_gl(gl_file, gl_time_index):

    if gl_file is not None:
        xGL = read_netcdf(gl_file, 'xGL', time_index=gl_time_index)
        yGL = read_netcdf(gl_file, 'yGL', time_index=gl_time_index)
    else:
        xGL = None
        yGL = None
    return xGL, yGL
Example #11
def iceberg_meltwater(grid_path, input_dir, output_file, nc_out=None, prec=32):

    from plot_latlon import latlon_plot

    input_dir = real_dir(input_dir)
    file_head = 'icebergs_'
    file_tail = '.nc'

    print('Building grids')
    # Read the NEMO grid from the first file
    # It has longitude in the range -180 to 180
    file_path = input_dir + file_head + '01' + file_tail
    nemo_lon = read_netcdf(file_path, 'nav_lon')
    nemo_lat = read_netcdf(file_path, 'nav_lat')
    # Build the model grid
    model_grid = Grid(grid_path, max_lon=180)

    print('Interpolating')
    icebergs_interp = np.zeros([12, model_grid.ny, model_grid.nx])
    for month in range(12):
        print('...month ' + str(month + 1))
        # Read the data
        file_path = input_dir + file_head + '{0:02d}'.format(month +
                                                             1) + file_tail
        icebergs = read_netcdf(file_path, 'berg_total_melt', time_index=0)
        # Interpolate
        icebergs_interp_tmp = interp_nonreg_xy(nemo_lon,
                                               nemo_lat,
                                               icebergs,
                                               model_grid.lon_1d,
                                               model_grid.lat_1d,
                                               fill_value=0)
        # Make sure the land and ice shelf cavities don't get any iceberg melt
        icebergs_interp_tmp[model_grid.land_mask + model_grid.ice_mask] = 0
        # Save to the master array
        icebergs_interp[month, :] = icebergs_interp_tmp

    write_binary(icebergs_interp, output_file, prec=prec)

    print('Plotting')
    # Make a nice plot of the annual mean
    latlon_plot(mask_land_ice(np.mean(icebergs_interp, axis=0), model_grid),
                model_grid,
                include_shelf=False,
                vmin=0,
                title=r'Annual mean iceberg melt (kg/m$^2$/s)')
    if nc_out is not None:
        # Also write to NetCDF file
        print('Writing ' + nc_out)
        ncfile = NCfile(nc_out, model_grid, 'xyt')
        ncfile.add_time(np.arange(12) + 1, units='months')
        ncfile.add_variable('iceberg_melt',
                            icebergs_interp,
                            'xyt',
                            units='kg/m^2/s')
        ncfile.close()
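
A usage sketch with hypothetical paths; the input directory is expected to contain monthly files icebergs_01.nc to icebergs_12.nc, as assumed by the loop above:

iceberg_meltwater('mitgcm_grid.nc', 'nemo_icebergs/', 'iceberg_melt.bin', nc_out='iceberg_melt.nc', prec=32)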
Example #12
def read_plot_timeseries(var,
                         file_path,
                         precomputed=False,
                         grid=None,
                         lon0=None,
                         lat0=None,
                         fig_name=None,
                         monthly=True,
                         legend_in_centre=False,
                         dpi=None):

    # Set parameters (only care about title and units)
    title, units = set_parameters(var)[2:4]

    if precomputed:
        # Read the time array; don't need to back up one month
        time = netcdf_time(file_path, monthly=False)

    if var.endswith('mass_balance'):
        if precomputed:
            # Read the fields from the timeseries file
            shelf = var[:var.index('_mass_balance')]
            melt = read_netcdf(file_path, shelf + '_total_melt')
            freeze = read_netcdf(file_path, shelf + '_total_freeze')
        else:
            # Calculate the timeseries from the MITgcm file(s)
            time, melt, freeze = calc_special_timeseries(var,
                                                         file_path,
                                                         grid=grid,
                                                         monthly=monthly)
        timeseries_multi_plot(time, [melt, freeze, melt + freeze],
                              ['Melting', 'Freezing', 'Net'],
                              ['red', 'blue', 'black'],
                              title=title,
                              units=units,
                              monthly=monthly,
                              fig_name=fig_name,
                              dpi=dpi,
                              legend_in_centre=legend_in_centre)
    else:
        if precomputed:
            data = read_netcdf(file_path, var)
        else:
            time, data = calc_special_timeseries(var,
                                                 file_path,
                                                 grid=grid,
                                                 lon0=lon0,
                                                 lat0=lat0,
                                                 monthly=monthly)
        make_timeseries_plot(time,
                             data,
                             title=title,
                             units=units,
                             monthly=monthly,
                             fig_name=fig_name,
                             dpi=dpi)
Example #13
 def read_and_mask (var_name, file_path, second_file_path=None, check_diff_time=False):
     # Do we need to choose the right file?
     if second_file_path is not None:
         file_path_use = find_variable(file_path, second_file_path, var_name)
     else:
         file_path_use = file_path
     # Read and mask the data
     if check_diff_time and diff_time:
         return mask_3d(read_netcdf(file_path_use, var_name, time_index=time_index_2, t_start=t_start_2, t_end=t_end_2, time_average=time_average), grid)
     else:
         return mask_3d(read_netcdf(file_path_use, var_name, time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average), grid)
Example #14
def read_plot_hovmoller_ts(hovmoller_file,
                           loc,
                           grid,
                           smooth=0,
                           zmin=None,
                           zmax=None,
                           tmin=None,
                           tmax=None,
                           smin=None,
                           smax=None,
                           t_contours=None,
                           s_contours=None,
                           date_since_start=False,
                           ctype='basic',
                           t0=None,
                           s0=None,
                           title=None,
                           fig_name=None,
                           monthly=True,
                           figsize=(12, 7),
                           dpi=None,
                           return_fig=False):

    grid = choose_grid(grid, None)
    temp = read_netcdf(hovmoller_file, loc + '_temp')
    salt = read_netcdf(hovmoller_file, loc + '_salt')
    time = netcdf_time(hovmoller_file, monthly=False)
    loc_string = region_names[loc]
    return hovmoller_ts_plot(temp,
                             salt,
                             time,
                             grid,
                             smooth=smooth,
                             tmin=tmin,
                             tmax=tmax,
                             smin=smin,
                             smax=smax,
                             zmin=zmin,
                             zmax=zmax,
                             monthly=monthly,
                             t_contours=t_contours,
                             s_contours=s_contours,
                             loc_string=loc_string,
                             title=title,
                             date_since_start=date_since_start,
                             ctype=ctype,
                             t0=t0,
                             s0=s0,
                             figsize=figsize,
                             dpi=dpi,
                             return_fig=return_fig,
                             fig_name=fig_name)
Example #15
def read_plot_slice (var, file_path, grid, lon0=None, lat0=None, time_index=None, t_start=None, t_end=None, time_average=False, hmin=None, hmax=None, zmin=None, zmax=None, vmin=None, vmax=None, date_string=None, fig_name=None, second_file_path=None):

    # Make sure we'll end up with a single record in time
    if time_index is None and not time_average:
        print('Error (read_plot_slice): either specify time_index or set time_average=True.')
        sys.exit()

    if date_string is None and time_index is not None:
        # Determine what to write about the date
        date_string = parse_date(file_path=file_path, time_index=time_index)

    if not isinstance(grid, Grid):
        # This is the path to the NetCDF grid file, not a Grid object
        # Make a grid object from it
        grid = Grid(grid)

    # Read necessary variables from NetCDF file and mask appropriately
    if var in ['temp', 'tminustf']:
        # Read temperature. Some of these variables need more than temperature and so second_file_path might be set.
        if second_file_path is not None:
            file_path_use = find_variable(file_path, second_file_path, 'THETA')
        else:
            file_path_use = file_path        
        temp = mask_3d(read_netcdf(file_path_use, 'THETA', time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average), grid)
    if var in ['salt', 'tminustf']:
        if second_file_path is not None:
            file_path_use = find_variable(file_path, second_file_path, 'SALT')
        else:
            file_path_use = file_path
        salt = mask_3d(read_netcdf(file_path_use, 'SALT', time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average), grid)
    if var == 'u':
        u = mask_3d(read_netcdf(file_path, 'UVEL', time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average), grid, gtype='u')
    if var == 'v':
        v = mask_3d(read_netcdf(file_path, 'VVEL', time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average), grid, gtype='v')

    # Plot
    if var == 'temp':
        slice_plot(temp, grid, lon0=lon0, lat0=lat0, hmin=hmin, hmax=hmax, zmin=zmin, zmax=zmax, vmin=vmin, vmax=vmax, title=r'Temperature ($^{\circ}$C)', date_string=date_string, fig_name=fig_name)
    elif var == 'salt':
        slice_plot(salt, grid, lon0=lon0, lat0=lat0, hmin=hmin, hmax=hmax, zmin=zmin, zmax=zmax, vmin=vmin, vmax=vmax, title='Salinity (psu)', date_string=date_string, fig_name=fig_name)
    elif var == 'tminustf':
        slice_plot(t_minus_tf(temp, salt, grid), grid, lon0=lon0, lat0=lat0, hmin=hmin, hmax=hmax, zmin=zmin, zmax=zmax, vmin=vmin, vmax=vmax, ctype='plusminus', title=r'Difference from in-situ freezing point ($^{\circ}$C)', date_string=date_string, fig_name=fig_name)
    elif var == 'u':
        slice_plot(u, grid, gtype='u', lon0=lon0, lat0=lat0, hmin=hmin, hmax=hmax, zmin=zmin, zmax=zmax, vmin=vmin, vmax=vmax, ctype='plusminus', title='Zonal velocity (m/s)', date_string=date_string, fig_name=fig_name)
    elif var == 'v':
        slice_plot(v, grid, gtype='v', lon0=lon0, lat0=lat0, hmin=hmin, hmax=hmax, zmin=zmin, zmax=zmax, vmin=vmin, vmax=vmax, ctype='plusminus', title='Meridional velocity (m/s)', date_string=date_string, fig_name=fig_name)
    else:
        print('Error (read_plot_slice): variable key ' + str(var) + ' does not exist')
        sys.exit()
Example #16
def timeseries_transport_transect(file_path,
                                  grid,
                                  point0,
                                  point1,
                                  direction='N',
                                  time_index=None,
                                  t_start=None,
                                  t_end=None,
                                  time_average=False):

    # Read u and v
    u = mask_3d(read_netcdf(file_path,
                            'UVEL',
                            time_index=time_index,
                            t_start=t_start,
                            t_end=t_end,
                            time_average=time_average),
                grid,
                gtype='u',
                time_dependent=True)
    v = mask_3d(read_netcdf(file_path,
                            'VVEL',
                            time_index=time_index,
                            t_start=t_start,
                            t_end=t_end,
                            time_average=time_average),
                grid,
                gtype='v',
                time_dependent=True)
    if len(u.shape) == 3:
        # Just one timestep; add a dummy time dimension
        u = np.expand_dims(u, 0)
        v = np.expand_dims(v, 0)
    # Build the timeseries
    timeseries = []
    for t in range(u.shape[0]):
        # Get the "southward" and "northward" components
        trans_S, trans_N = transport_transect(u[t, :], v[t, :], grid, point0,
                                              point1)
        # Combine them
        if direction == 'N':
            trans = trans_N - trans_S
        elif direction == 'S':
            trans = trans_S - trans_N
        else:
            print('Error (timeseries_transport_transect): invalid direction ' + direction)
            sys.exit()
        timeseries.append(trans)
    return np.array(timeseries)
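
A usage sketch for a transect between two (longitude, latitude) endpoints; the points and paths are placeholders, and the units comment is an assumption:

grid = Grid('grid.nc')
point0 = (-60, -74)   # (lon, lat) of one end of the transect, illustrative only
point1 = (-55, -72)
trans = timeseries_transport_transect('output.nc', grid, point0, point1, direction='N')
# 'trans' holds one net northward transport value per time record,
# in whatever units transport_transect returns (typically Sv)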
Example #17
def timeseries_area_sfc(option,
                        file_path,
                        var_name,
                        grid,
                        gtype='t',
                        time_index=None,
                        t_start=None,
                        t_end=None,
                        time_average=False):

    # Read the data
    data = read_netcdf(file_path,
                       var_name,
                       time_index=time_index,
                       t_start=t_start,
                       t_end=t_end,
                       time_average=time_average)
    if len(data.shape) == 2:
        # Just one timestep; add a dummy time dimension
        data = np.expand_dims(data, 0)

    # Process one time index at a time to save memory
    timeseries = []
    for t in range(data.shape[0]):
        # Mask
        data_tmp = mask_land_ice(data[t, :], grid, gtype=gtype)
        # Area-average or integrate
        timeseries.append(over_area(option, data_tmp, grid, gtype=gtype))
    return np.array(timeseries)
Example #18
def timeseries_area_threshold(file_path,
                              var_name,
                              threshold,
                              grid,
                              gtype='t',
                              time_index=None,
                              t_start=None,
                              t_end=None,
                              time_average=False):

    # Read the data
    data = read_netcdf(file_path,
                       var_name,
                       time_index=time_index,
                       t_start=t_start,
                       t_end=t_end,
                       time_average=time_average)
    if len(data.shape) == 2:
        # Just one timestep; add a dummy time dimension
        data = np.expand_dims(data, 0)
    # Convert to array of 1s and 0s based on threshold
    data = (data >= threshold).astype(float)
    # Now build the timeseries
    timeseries = []
    for t in range(data.shape[0]):
        timeseries.append(area_integral(data[t, :], grid, gtype=gtype))
    return np.array(timeseries)
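
One natural use of this is sea ice extent, conventionally defined as the total area of cells where sea ice concentration is at least 15%. A sketch assuming SIarea is the sea ice concentration variable in the output file (paths hypothetical):

grid = Grid('grid.nc')
extent = timeseries_area_threshold('output.nc', 'SIarea', 0.15, grid)
# one value per time record, in the same units as grid.dA (typically m^2)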
Example #19
def timeseries_avg_3d(file_path,
                      var_name,
                      grid,
                      gtype='t',
                      time_index=None,
                      t_start=None,
                      t_end=None,
                      time_average=False,
                      mask=None):

    data = read_netcdf(file_path,
                       var_name,
                       time_index=time_index,
                       t_start=t_start,
                       t_end=t_end,
                       time_average=time_average)
    if len(data.shape) == 3:
        # Just one timestep; add a dummy time dimension
        data = np.expand_dims(data, 0)
    # Process one time index at a time to save memory
    timeseries = []
    for t in range(data.shape[0]):
        if mask is None:
            data_tmp = mask_3d(data[t, :], grid, gtype=gtype)
        else:
            data_tmp = apply_mask(data[t, :],
                                  np.invert(mask),
                                  depth_dependent=True)
        # Volume average
        timeseries.append(volume_average(data_tmp, grid, gtype=gtype))
    return np.array(timeseries)
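
A usage sketch showing the default 3D land masking and the optional custom mask; 'region_mask' is an assumed boolean array (True where points should be included) built elsewhere, and the paths are hypothetical:

grid = Grid('grid.nc')
# Volume-averaged temperature over the whole (masked) domain
theta_avg = timeseries_avg_3d('output.nc', 'THETA', grid)
# Restricted to a region of interest, given some precomputed boolean region_mask
theta_region = timeseries_avg_3d('output.nc', 'THETA', grid, mask=region_mask)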
Example #20
def timeseries_point_vavg(file_path,
                          var_name,
                          lon0,
                          lat0,
                          grid,
                          gtype='t',
                          time_index=None,
                          t_start=None,
                          t_end=None,
                          time_average=False):

    # Read the data
    data = read_netcdf(file_path,
                       var_name,
                       time_index=time_index,
                       t_start=t_start,
                       t_end=t_end,
                       time_average=time_average)
    if len(data.shape) == 3:
        # Just one timestep; add a dummy time dimension
        data = np.expand_dims(data, 0)
    # Interpolate to the point, and get hfac too
    data_point, hfac_point = interp_bilinear(data,
                                             lon0,
                                             lat0,
                                             grid,
                                             gtype=gtype,
                                             return_hfac=True)
    # Vertically average to get timeseries
    return vertical_average_column(data_point,
                                   hfac_point,
                                   grid,
                                   gtype=gtype,
                                   time_dependent=True)
Example #21
def timeseries_domain_volume(file_path,
                             grid,
                             time_index=None,
                             t_start=None,
                             t_end=None,
                             time_average=False):

    # Read free surface
    eta = read_netcdf(file_path,
                      'ETAN',
                      time_index=time_index,
                      t_start=t_start,
                      t_end=t_end,
                      time_average=time_average)
    if len(eta.shape) == 2:
        # Just one timestep; add a dummy time dimension
        eta = np.expand_dims(eta, 0)
    # Calculate volume without free surface changes
    volume = np.sum(grid.dV)
    # Build the timeseries
    timeseries = []
    for t in range(eta.shape[0]):
        # Get volume change in top layer due to free surface
        volume_top = np.sum(eta[t, :] * grid.dA)
        timeseries.append(volume + volume_top)
    return np.array(timeseries)
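
The returned values are the fixed grid volume plus the time-varying free-surface contribution, so the relative change over time can be pulled out directly; a small sketch with hypothetical paths:

grid = Grid('grid.nc')
volume = timeseries_domain_volume('output.nc', grid)
volume_anomaly_percent = (volume - volume.mean())/volume.mean()*100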
Example #22
def timeseries_max(file_path,
                   var_name,
                   grid,
                   gtype='t',
                   time_index=None,
                   t_start=None,
                   t_end=None,
                   time_average=False,
                   xmin=None,
                   xmax=None,
                   ymin=None,
                   ymax=None):

    data = read_netcdf(file_path,
                       var_name,
                       time_index=time_index,
                       t_start=t_start,
                       t_end=t_end,
                       time_average=time_average)
    if len(data.shape) == 2:
        # Just one timestep; add a dummy time dimension
        data = np.expand_dims(data, 0)

    num_time = data.shape[0]
    max_data = np.zeros(num_time)
    for t in range(num_time):
        max_data[t] = var_min_max(data[t, :],
                                  grid,
                                  gtype=gtype,
                                  xmin=xmin,
                                  xmax=xmax,
                                  ymin=ymin,
                                  ymax=ymax)[1]
    return max_data
Example #23
 def read_process_data (file_path, var_name, grid, mask_option='3d', gtype='t', lev_option=None, ismr=False, psi=False):
     data = read_netcdf(file_path, var_name)
     if mask_option == '3d':
         data = mask_3d(data, grid, gtype=gtype, time_dependent=True)
     elif mask_option == 'except_ice':
         data = mask_except_ice(data, grid, gtype=gtype, time_dependent=True)
     elif mask_option == 'land':
         data = mask_land(data, grid, gtype=gtype, time_dependent=True)
     elif mask_option == 'land_ice':
         data = mask_land_ice(data, grid, gtype=gtype, time_dependent=True)
     else:
         print('Error (read_process_data): invalid mask_option ' + mask_option)
         sys.exit()
     if lev_option is not None:
         if lev_option == 'top':
             data = select_top(data)
         elif lev_option == 'bottom':
             data = select_bottom(data)
         else:
             print('Error (read_process_data): invalid lev_option ' + lev_option)
             sys.exit()
     if ismr:
         data = convert_ismr(data)
     if psi:
         data = np.sum(data, axis=-3)*1e-6
     return data
Example #24
def read_plot_hovmoller(var_name,
                        hovmoller_file,
                        grid,
                        zmin=None,
                        zmax=None,
                        vmin=None,
                        vmax=None,
                        contours=None,
                        monthly=True,
                        fig_name=None,
                        figsize=(14, 5)):

    data = read_netcdf(hovmoller_file, var_name)
    # Set monthly=False so we don't back up an extra month (because precomputed)
    time = netcdf_time(hovmoller_file, monthly=False)
    title, units = read_title_units(hovmoller_file, var_name)

    grid = choose_grid(grid, None)

    # Make the plot
    hovmoller_plot(data,
                   time,
                   grid,
                   vmin=vmin,
                   vmax=vmax,
                   zmin=zmin,
                   zmax=zmax,
                   monthly=monthly,
                   contours=contours,
                   title=title,
                   fig_name=fig_name,
                   figsize=figsize)
Example #25
def mit_ics (grid_path, source_file, output_dir, nc_out=None, prec=64):

    from file_io import NCfile, read_netcdf
    from interpolation import interp_reg

    output_dir = real_dir(output_dir)

    # Fields to interpolate
    fields = ['THETA', 'SALT', 'SIarea', 'SIheff', 'SIhsnow']
    # Flag for 2D or 3D
    dim = [3, 3, 2, 2, 2]
    # End of filenames for output
    outfile_tail = '_MIT.ini'

    print('Building grids')
    source_grid = Grid(source_file)
    model_grid = Grid(grid_path)
    # Extract land mask of source grid
    source_mask = source_grid.hfac==0

    print('Building mask for points to fill')
    # Select open cells according to the model, interpolated to the source grid
    fill = np.ceil(interp_reg(model_grid, source_grid, np.ceil(model_grid.hfac), fill_value=0)).astype(bool)
    # Extend into mask a few times to make sure there are no artifacts near the coast
    fill = extend_into_mask(fill, missing_val=0, use_3d=True, num_iters=3)

    # Set up a NetCDF file so the user can check the results
    if nc_out is not None:
        ncfile = NCfile(nc_out, model_grid, 'xyz')

    # Process fields
    for n in range(len(fields)):
        print('Processing ' + fields[n])
        out_file = output_dir + fields[n] + outfile_tail
        # Read the January climatology
        source_data = read_netcdf(source_file, fields[n], time_index=0)
        # Discard the land mask, and extrapolate slightly into missing regions so the interpolation doesn't get messed up.
        print('...extrapolating into missing regions')
        if dim[n] == 3:
            source_data = discard_and_fill(source_data, source_mask, fill)
        else:
            # Just care about the surface layer
            source_data = discard_and_fill(source_data, source_mask[0,:], fill[0,:], use_3d=False)
        print('...interpolating to model grid')
        data_interp = interp_reg(source_grid, model_grid, source_data, dim=dim[n])
        # Fill the land mask with zeros
        if dim[n] == 3:
            data_interp[model_grid.hfac==0] = 0
        else:
            data_interp[model_grid.hfac[0,:]==0] = 0
        write_binary(data_interp, out_file, prec=prec)
        if nc_out is not None:
            print('...adding to ' + nc_out)
            if dim[n] == 3:
                ncfile.add_variable(fields[n], data_interp, 'xyz')
            else:
                ncfile.add_variable(fields[n], data_interp, 'xy')

    if nc_out is not None:
        ncfile.close()    
Example #26
def make_climatology (start_year, end_year, output_file, directory='./'):
    
    directory = real_dir(directory)

    # Copy the first file
    # This will make a skeleton file with 12 time records and all the right metadata; later we will overwrite the values of all the time-dependent variables.
    print('Setting up ' + output_file)
    shutil.copyfile(directory+str(start_year)+'.nc', output_file)

    # Find all the time-dependent variables
    var_names = time_dependent_variables(output_file)

    # Calculate the monthly climatology for each variable
    id_out = nc.Dataset(output_file, 'a')
    for var in var_names:
        print('Processing ' + var)

        # Start with the first year
        print('...' + str(start_year))
        data = id_out.variables[var][:]
        
        # Add subsequent years
        for year in range(start_year+1, end_year+1):
            print('...' + str(year))
            data += read_netcdf(directory+str(year)+'.nc', var)

        # Divide by number of years to get average
        data /= (end_year-start_year+1)
        # Overwrite in output_file
        id_out.variables[var][:] = data

    id_out.close()
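
A usage sketch; the year range and the directory layout (one file per year named <year>.nc, as the copy step above assumes) are illustrative:

make_climatology(1979, 2017, 'climatology_1979_2017.nc', directory='annual_averages/')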
Example #27
 def read_data (var_name):
     # First choose the right file
     if second_file_path is not None:
         file_path_use = find_variable(file_path, second_file_path, var_name)
     else:
         file_path_use = file_path
     data = read_netcdf(file_path_use, var_name, time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average)
     return data
Example #28
 def read_and_mask (var_name, check_second=False, gtype='t'):
     # Do we need to choose the right file?
     if check_second and second_file_path is not None:
         file_path_use = find_variable(file_path, second_file_path, var_name)
     else:
         file_path_use = file_path
     # Read and mask the data
     return mask_3d(read_netcdf(file_path_use, var_name, time_index=time_index, t_start=t_start, t_end=t_end, time_average=time_average), grid, gtype=gtype)
Example #29
def calc_ice_prod (file_path, out_file, monthly=True):

    # Build the grid from the file
    grid = Grid(file_path)

    # Add up all the terms to get sea ice production at each time index
    ice_prod = read_netcdf(file_path, 'SIdHbOCN') + read_netcdf(file_path, 'SIdHbATC') + read_netcdf(file_path, 'SIdHbATO') + read_netcdf(file_path, 'SIdHbFLO')
    # Also need time
    time = netcdf_time(file_path, monthly=monthly)

    # Set negative values to 0
    ice_prod = np.maximum(ice_prod, 0)

    # Write a new file
    ncfile = NCfile(out_file, grid, 'xyt')
    ncfile.add_time(time)
    ncfile.add_variable('ice_prod', ice_prod, 'xyt', long_name='Net sea ice production', units='m/s')
    ncfile.close()
Example #30
def fris_melt(file_path,
              grid,
              result='massloss',
              time_index=None,
              t_start=None,
              t_end=None,
              time_average=False,
              mass_balance=False):

    # Read ice shelf melt rate and convert to m/y
    ismr = convert_ismr(
        read_netcdf(file_path,
                    'SHIfwFlx',
                    time_index=time_index,
                    t_start=t_start,
                    t_end=t_end,
                    time_average=time_average))

    if mass_balance:
        # Split into melting and freezing
        ismr_positive = np.maximum(ismr, 0)
        ismr_negative = np.minimum(ismr, 0)

    if time_index is not None or time_average:
        # Just one timestep
        if mass_balance:
            melt = total_melt(ismr_positive, grid.fris_mask, grid, result=result)
            freeze = total_melt(ismr_negative, grid.fris_mask, grid, result=result)
            return melt, freeze
        else:
            return total_melt(ismr, grid.fris_mask, grid, result=result)
    else:
        # Loop over timesteps
        num_time = ismr.shape[0]
        if mass_balance:
            melt = np.zeros(num_time)
            freeze = np.zeros(num_time)
            for t in range(num_time):
                melt[t] = total_melt(ismr_positive[t, :],
                                     grid.fris_mask,
                                     grid,
                                     result=result)
                freeze[t] = total_melt(ismr_negative[t, :],
                                       grid.fris_mask,
                                       grid,
                                       result=result)
            return melt, freeze
        else:
            melt = np.zeros(num_time)
            for t in range(num_time):
                melt[t] = total_melt(ismr[t, :],
                                     grid.fris_mask,
                                     grid,
                                     result=result)
            return melt
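
A usage sketch for both calling modes, with the file path and grid hypothetical:

grid = Grid('grid.nc')
# Time-averaged net mass loss over all records (single value)
net = fris_melt('output.nc', grid, result='massloss', time_average=True)
# Separate melting and freezing timeseries, one value per record
melt, freeze = fris_melt('output.nc', grid, result='massloss', mass_balance=True)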