Example #1
    def from_netcdf(cls, path):
        """
        Construct a fuel moisture model from data stored in a netCDF file.
        
        :param path: the path to the netCDF4 file
        """
        import netCDF4

        logging.info("reading from netCDF file " + path)
        d = netCDF4.Dataset(path)
        ncfmc = d.variables['FMC_GC'][:, :, :]

        d0, d1, k = ncfmc.shape
        P = d.variables['FMC_COV'][:, :, :, :]

        logging.info(
            'fuel_moisture_model.from_netcdf: reading FMC_GC %s FMC_COV %s' %
            (inq(ncfmc), inq(P)))

        Tk = np.array([1.0, 10.0, 100.0]) * 3600

        fm = cls(ncfmc[:, :, :k - 2], Tk)

        fm.m_ext[:, :, k - 2:] = ncfmc[:, :, k - 2:]
        fm.P[:, :, :, :] = P

        logging.info(
            'fuel_moisture_model.from_netcdf: err %s' %
            np.max(np.abs(ncfmc[:, :, :k - 2] - fm.m_ext[:, :, :k - 2])))
        logging.info(
            'fuel_moisture_model.from_netcdf: extended state fmc %d fields + 2 parameters'
            % (k - 2))

        return fm
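
A minimal usage sketch for the classmethod above, assuming the class is FuelMoistureModel (the name used in the log messages of the later examples); the import path and the file name are illustrative, not confirmed by this listing.

# hedged usage sketch; module path and file name are illustrative
import logging
from fmda.fuel_moisture_model import FuelMoistureModel   # hypothetical import path

logging.basicConfig(level=logging.INFO)

fm = FuelMoistureModel.from_netcdf('fmda_state.nc')       # illustrative path
print(fm.m_ext.shape, fm.P.shape)                         # extended state and covariance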
Example #2
    def to_netcdf(self, path, data_vars):
        """
        Store the model in a netCDF file that aims to be viewable with standard
        tools and loosely follows the WRF conventions.
        
        :param path: the path where to store the model
        :param data_vars: dictionary of additional variables to store for visualization
        """
        import netCDF4

        d = netCDF4.Dataset(path, 'w', format='NETCDF4')

        d0, d1, k = self.m_ext.shape

        d.createDimension('fuel_moisture_classes_stag', k)
        d.createDimension('south_north', d0)
        d.createDimension('west_east', d1)
        ncfmc = d.createVariable(
            'FMC_GC', 'f4',
            ('south_north', 'west_east', 'fuel_moisture_classes_stag'))
        ncfmc[:, :, :] = self.m_ext
        logging.info(
            'fuel_moisture_model.to_netcdf: writing extended state as FMC_GC %s covariance as FMC_COV %s'
            % (inq(self.m_ext), inq(self.P)))
        ncfmc_cov = d.createVariable(
            'FMC_COV', 'f4',
            ('south_north', 'west_east', 'fuel_moisture_classes_stag',
             'fuel_moisture_classes_stag'))
        ncfmc_cov[:, :, :, :] = self.P
        for v in data_vars:
            d.createVariable(v, 'f4',
                             ('south_north', 'west_east'))[:, :] = data_vars[v]

        d.close()
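
A hedged round-trip sketch continuing Example #1: write the model state plus one additional 2-D visualization field (the name 'T2' is illustrative) and read it back. Since FMC_GC and FMC_COV are stored as 'f4', the round trip is exact only to single precision.

# hedged round-trip sketch; `fm` is a FuelMoistureModel instance from Example #1
# and 'T2' is an illustrative visualization field name
import numpy as np

d0, d1 = fm.m_ext.shape[:2]
fm.to_netcdf('fmda_state.nc', {'T2': np.full((d0, d1), 290.0, dtype=np.float32)})
fm2 = FuelMoistureModel.from_netcdf('fmda_state.nc')
assert np.allclose(fm.m_ext, fm2.m_ext, atol=1e-5)   # single-precision storage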
Example #3
def write_geogrid_var(path_dir, varname, array, description, index, bits=32):
    """
    write geogrid dataset and index 
    """
    path_dir = osp.abspath(path_dir)
    logging.info('write_geogrid_var path_dir=%s varname=%s array=%s index=%s' %
                 (path_dir, varname, inq(array), str(index)))
    if not osp.exists(path_dir):
        os.makedirs(path_dir)

    # write geogrid dataset
    geogrid_ds_path = osp.join(path_dir, varname)
    index['description'] = addquotes(description)
    index['tile_bdr'] = 0

    write_geogrid(geogrid_ds_path, array, index, bits=bits)

    # write also the index as json entry to modify later
    index_json_path = osp.join(path_dir, 'index.json')
    try:
        index_json = json.load(open(index_json_path, 'r'))
    except:
        index_json = {}
    index_json[varname] = index
    json.dump(index_json,
              open(index_json_path, 'w'),
              indent=4,
              separators=(',', ': '))

    geogrid_tbl_var = {
        'name': varname,
        'dest_type': 'continuous',
        'interp_option': 'default:average_gcell(4.0)+four_pt+average_4pt',
        'abs_path': geogrid_ds_path,
        'priority': 1
    }

    # write a segment of GEOGRID.TBL
    geogrid_tbl_path = osp.join(path_dir, 'GEOGRID.TBL')
    write_table(geogrid_tbl_path,
                geogrid_tbl_var,
                mode='a',
                divider_after=True)

    # write also as json
    geogrid_tbl_json_path = osp.join(path_dir, 'geogrid_tbl.json')
    try:
        geogrid_tbl = json.load(open(geogrid_tbl_json_path, 'r'))
    except:
        geogrid_tbl = {}
    geogrid_tbl[varname] = geogrid_tbl_var

    json.dump(geogrid_tbl,
              open(geogrid_tbl_json_path, 'w'),
              indent=4,
              separators=(',', ': '))
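
A hedged call sketch for this variant; the geolocation keys placed in index before the call follow the geogrid index format, but all values below are illustrative and the grid is random data. The description string matches the one used in Example #10.

# hedged call sketch; all index values below are illustrative
import numpy as np

index = {
    'projection': 'lambert',                    # illustrative projection block
    'dx': 1000.0, 'dy': 1000.0,
    'truelat1': 30.0, 'truelat2': 60.0, 'stdlon': -105.0,
    'known_x': 1.0, 'known_y': 1.0,
    'known_lat': 39.0, 'known_lon': -105.0,
}
fmc = np.random.rand(200, 150, 3)               # 1h/10h/100h fuel moisture fields
write_geogrid_var('geo_fmda', 'FMC_GC', fmc,
                  '1h, 10h, 100h fuel moisture', index, bits=32)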
Example #4
    def __init__(self, m0, Tk=None, P0=None):
        """
        Initialize the model with given position and moisture levels.

        :param m0: the initial condition for the entire grid (shape: grid0 x grid1 x num_fuels)
        :param Tk: drying/wetting time lags of the simulated fuels in seconds (one per fuel), default [1, 10, 100] hours
        :param P0: initial state error covariance
        """
        logging.info('FuelMoistureModel.__init__ m0 %s, Tk %s, P0 %s' %
                     (inq(m0), inq(Tk), inq(P0)))
        self.Tk = np.array([1.0, 10.0, 100.0]) * 3600  # nominal fuel delays
        self.r0 = 0.05  # threshold rainfall [mm/h]
        self.rk = 8.0  # saturation rain intensity [mm/h]
        self.Trk = 14.0 * 3600  # time constant for wetting model [s]
        self.S = 2.5  # saturation intensity [dimensionless]

        s0, s1, k = m0.shape
        if Tk is not None:
            self.Tk = Tk
        dim = k + 2
        assert k == len(self.Tk)
        self.m_ext = np.zeros((s0, s1, dim))
        self.m_ext[:, :, :k] = m0

        # note: the moisture advance will proceed by fuel moisture types
        # thus we only need space for one class at a time
        self.m_i = np.zeros((s0, s1))
        #self.mn_i = np.zeros((s0,s1))
        self.rlag = np.zeros((s0, s1))
        self.equi = np.zeros((s0, s1))
        self.model_ids = np.zeros((s0, s1))

        self.EdA = np.zeros((s0, s1))
        self.EwA = np.zeros((s0, s1))

        # state covariance current and forecasted
        self.P = np.zeros((s0, s1, dim, dim))
        self.P2 = np.zeros((dim, dim))
        if P0 is not None:
            for s in np.ndindex((s0, s1)):
                self.P[s[0], s[1], :, :] = P0

        # fill out the fixed parts of the jacobian
        self.J = np.zeros((s0, s1, dim, dim))
        self.Jii = np.zeros((s0, s1))

        # note: the observation operator H is common for all dims
        self.H = np.zeros((k, dim))
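
A hedged construction sketch: a small grid with three fuel classes at 10% moisture, nominal 1/10/100-hour time lags in seconds, and a small diagonal initial covariance for the extended state of k + 2 components; grid size and values are illustrative.

# hedged construction example; grid size, initial moisture and covariance are illustrative
import numpy as np

s0, s1, k = 10, 10, 3
m0 = np.full((s0, s1, k), 0.1)              # start every fuel class at 10% moisture
Tk = np.array([1.0, 10.0, 100.0]) * 3600    # drying/wetting time lags [s]
P0 = np.eye(k + 2) * 0.01                   # initial covariance of the extended state
model = FuelMoistureModel(m0, Tk=Tk, P0=P0)
print(model.m_ext.shape, model.P.shape)     # (10, 10, 5) and (10, 10, 5, 5)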
Example #5
def execute_da_step(model, model_time, covariates, covariates_names, fm10):
    """
    Execute a single DA step from the current state/extended parameters and covariance matrix using
    the <covariates> and observations <fm10>.  Assimilation time window is fixed at 60 mins.

    :param model: a FuelMoistureModel
    :param model_time: the current model time
    :param covariates: the covariate fields to take into account to model the spatial structure of the FM field
    :param covariates_names: strings with the names of the covariates
    :param fm10: the 10-hr fuel moisture observations
    """
    valid_times = [
        z for z in fm10.keys() if abs((z - model_time).total_seconds()) < 1800
    ]

    if len(valid_times) > 0:

        # retrieve all observations for current time
        obs_valid_now = []
        for z in valid_times:
            obs_valid_now.extend(fm10[z])

        logging.info('FMDA found %d valid observations at model time %s' %
                     (len(obs_valid_now), str(model_time)))

        fmc_gc = model.get_state()
        dom_shape = fmc_gc.shape[:2]

        logging.info('execute_da_step: model state fmc_gc %s' % inq(fmc_gc))
        # construct covariate storage
        Xd3 = min(len(covariates) + 1, len(obs_valid_now))
        logging.info('FMDA is using %d covariates: %s' %
                     (Xd3, ','.join(['fmc_gc[:,:,1]'] + covariates_names)))
        X = np.zeros((dom_shape[0], dom_shape[1], Xd3))
        X[:, :, 0] = fmc_gc[:, :, 1]
        for i, c in enumerate(covariates[:Xd3 - 1]):
            X[:, :, i + 1] = c

        # run the trend surface model (clamp output to [0.0 - 2.5] to be safe)
        Kf_fn, Vf_fn = fit_tsm(obs_valid_now, X)
        Kf_fn[Kf_fn < 0.0] = 0.0
        Kf_fn[Kf_fn > 2.5] = 2.5

        Kg = np.zeros((dom_shape[0], dom_shape[1], fmc_gc.shape[2]))

        # run the data assimilation step now
        logging.info(
            "FMDA mean Kf: %g Vf: %g state[0]: %g state[1]: %g state[2]: %g" %
            (np.mean(Kf_fn), np.mean(Vf_fn), np.mean(fmc_gc[:, :, 0]),
             np.mean(fmc_gc[:, :, 1]), np.mean(fmc_gc[:, :, 2])))
        model.kalman_update_single2(Kf_fn[:, :, np.newaxis],
                                    Vf_fn[:, :, np.newaxis, np.newaxis], 1, Kg)
        logging.info(
            "FMDA mean Kf: %g Vf: %g state[0]: %g state[1]: %g state[2]: %g" %
            (np.mean(Kf_fn), np.mean(Vf_fn), np.mean(fmc_gc[:, :, 0]),
             np.mean(fmc_gc[:, :, 1]), np.mean(fmc_gc[:, :, 2])))
    else:
        logging.warning(
            'FMDA no valid observations found, skipping data assimilation.')
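
The 30-minute window test in the first statement above can be illustrated directly; observations whose time stamps fall within ±30 minutes of model_time are pooled, anything else is ignored.

# hedged illustration of the assimilation time window used above
from datetime import datetime, timedelta

model_time = datetime(2023, 8, 1, 12, 0)
fm10 = {
    model_time - timedelta(minutes=20): ['obs_a'],   # inside the window, used
    model_time + timedelta(minutes=45): ['obs_b'],   # outside the window, ignored
}
valid_times = [z for z in fm10.keys()
               if abs((z - model_time).total_seconds()) < 1800]
print(valid_times)   # only the 11:40 time stamp qualifies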
Example #6
    def to_geogrid(self, path, index, lats=[], lons=[]):
        """
        Store model to geogrid files
        """
        not_coord = len(lats) == 0 or len(lons) == 0
        test_latslons = not not_coord

        logging.info(
            "fmda.fuel_moisture_model.to_geogrid path=%s lats %s lons %s" %
            (path, inq(lats), inq(lons)))
        logging.info("fmda.fuel_moisture_model.to_geogrid: geogrid_index=" +
                     str(index))
        ensure_dir(path)
        xsize, ysize, n = self.m_ext.shape
        if n != 5:
            logging.error('wrong number of extended state fields, expecting 5')

        if not not_coord:
            x = int(xsize * 0.5)
            y = int(ysize * 0.5)
            index.update({
                'known_x': float(y),
                'known_y': float(x),
                'known_lat': float(lats[x - 1, y - 1]),
                'known_lon': float(lons[x - 1, y - 1])
            })
            logging.info(
                "fmda.fuel_moisture_model.to_geogrid: geogrid updated=" +
                str(index))

        FMC_GC = np.zeros((xsize, ysize, 5))
        FMC_GC[:, :, :3] = self.m_ext[:, :, :3]
        if test_latslons:
            logging.info(
                "fmda.fuel_moisture_model.to_geogrid: storing lons lats to FMC_GC(:,:,4:5) to test in WRF against XLONG and XLAT"
            )
            FMC_GC[:, :, 3] = lons
            FMC_GC[:, :, 4] = lats
        FMEP = self.m_ext[:, :, 3:]

        write_geogrid_var(path, 'FMC_GC', FMC_GC, index, bits=32)
        write_geogrid_var(path, 'FMEP', FMEP, index, bits=32)
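
A hedged call sketch for this method; `model` and `index` are as in the earlier examples and the coordinate grids below are synthetic. Passing empty lats and lons skips both the known-point update and the lon/lat test layers.

# hedged call sketches; the coordinate grids are synthetic
import numpy as np

xsize, ysize = model.m_ext.shape[:2]
lats = np.repeat(np.linspace(39.0, 40.0, xsize)[:, np.newaxis], ysize, axis=1)
lons = np.repeat(np.linspace(-106.0, -105.0, ysize)[np.newaxis, :], xsize, axis=0)

# with coordinates: the grid midpoint becomes the known point and lon/lat are
# copied into the last two FMC_GC layers for checking against XLONG/XLAT in WRF
model.to_geogrid('geo_fmda', dict(index), lats=lats, lons=lons)

# without coordinates: the known-point update and the test layers are skipped
model.to_geogrid('geo_fmda_nocoord', dict(index), lats=[], lons=[])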
Example #7
def write_geogrid(path, array, index, bits=32, scale=None, uscale=None):
    """
    Write a geogrid dataset.

    :param path: the directory where the dataset is to be stored
    :param array: numpy array of real values, 2d or 3d
    :param index: dictionary with the geogrid index, geolocation and description already set
    :param bits: word size, 16 or 32 (default)
    :param scale: numeric scale factor, or None (default) to compute it automatically
    :param uscale: numeric scale to change units, or None (default) for no unit conversion
    """

    logging.info('write_geogrid path=%s array=%s index=%s' %
                 (path, inq(array), str(index)))
    if not osp.exists(path):
        os.makedirs(path)
    # write binary data file
    a = np.array(array)
    if uscale is not None:
        a = a * float(uscale)
    dims = a.shape
    if len(dims) < 3:
        dims = dims + (1, )
        a = np.reshape(a, dims)
    xsize, ysize, zsize = dims
    if scale is None:
        scale = 2**(np.ceil(np.log2(np.max(np.abs(a)))) - bits + 1)
    if scale != 1.:
        a = np.round(a / scale)
    if bits == 32:
        a = np.int32(a)
    elif bits == 16:
        a = np.int16(a)
    else:
        print('unsupported word size')
        sys.exit(1)
    a = a.transpose(2, 0, 1)
    logging.info('write_geogrid array min=%f max=%f avg=%f' %
                 (a.min(), a.max(), a.mean()))
    zsize, ysize, xsize = a.shape
    data_file = "00001-%05i.00001-%05i" % (xsize, ysize)
    data_path = osp.join(path, data_file)
    a.flatten().tofile(data_path)

    # write index
    index.update({
        'scale_factor': scale,
        'wordsize': bits // 8,
        'tile_x': xsize,
        'tile_y': ysize,
        'tile_z': zsize,
        'endian': sys.byteorder
    })
    index_path = osp.join(path, 'index')
    write_table(index_path, index)
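
A worked check of the automatic scale factor used above: with bits = 32 the scale is chosen as 2**(ceil(log2(max|a|)) - 31), so the largest magnitude fits the signed 32-bit range, and the stored integers reconstruct the field to within scale/2.

# worked example of the automatic scaling; the sample values are illustrative
import numpy as np

a = np.array([[0.02, 0.15], [0.30, 0.07]])
bits = 32
scale = 2**(np.ceil(np.log2(np.max(np.abs(a)))) - bits + 1)
quantized = np.int32(np.round(a / scale))
restored = quantized * scale
print(scale)                          # 2**(-1 - 31) = 2**-32, since ceil(log2(0.30)) == -1
print(np.max(np.abs(restored - a)))   # quantization error stays below scale / 2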
Example #8
def write_geogrid(path, array, index, bits=32):
    """
    Write geogrid dataset 
    :param path: the directory where the dataset is to be stored
    :param array: numpy array of real values, 2d or 3d
    :param index: json with geogrid index, with geolocation and description already set 
    :param bits: 16 or 32 (default)
    """

    logging.info('write_geogrid path=%s array=%s index=%s' %
                 (path, inq(array), str(index)))
    if not osp.exists(path):
        os.makedirs(path)
    # write binary data file
    a = np.array(array)
    dims = a.shape
    if len(dims) < 3:
        dims = dims + (1, )
        a = np.reshape(a, dims)
    xsize, ysize, zsize = dims
    scale = 2**(np.ceil(np.log2(np.max(np.abs(a)))) - bits + 1)
    aa = np.round(a / scale)
    if bits == 32:
        aa = np.int32(aa)
    elif bits == 16:
        aa = np.int16(aa)
    else:
        print('unsupported word size')
        sys.exit(1)
    a = aa.transpose(2, 0, 1)
    zsize, ysize, xsize = a.shape
    data_file = "00001-%05i.00001-%05i" % (xsize, ysize)
    data_path = osp.join(path, data_file)
    a.flatten().tofile(data_path)

    # write index
    index.update({
        'type': 'continuous',
        'signed': 'yes',
        'scale_factor': scale,
        'wordsize': bits // 8,
        'tile_x': xsize,
        'tile_y': ysize,
        'tile_z': zsize,
        'endian': sys.byteorder
    })
    index_path = osp.join(path, 'index')
    write_table(index_path, index)
Example #9
    def to_wps_format(self, path, index, lats, lons, time_tag):
        """
        Store model to wps format files
        """

        logging.info(
            "fmda.fuel_moisture_model.to_wps_format path=%s lats %s lons %s" %
            (path, inq(lats), inq(lons)))
        logging.info("fmda.fuel_moisture_model.to_wps_format: index=" +
                     str(index))
        ny, nx, n = self.m_ext.shape
        if n != 5:
            logging.error('wrong number of extended state fields, expecting 5')

        m = 7
        var = np.zeros((ny, nx, m))
        var[:, :, :3] = self.m_ext[:, :, :3]
        var[:, :, 3] = lons
        var[:, :, 4] = lats
        var[:, :, 5:] = self.m_ext[:, :, 3:]
        arrs = [var[:, :, k].swapaxes(0, 1) for k in range(m)]
        startloc = "SWCORNER"
        startlat = lats[0, 0]
        startlon = lons[0, 0]
        params = {
            'ifv': 5,
            'hdate': "{}:00:00".format(time_tag),
            'xfcst': 0.,
            'map_source': "WRF-SFIRE Wildland Fire Information and Forecasting System",
            'field': ["FM1H", "FM10H", "FM100H", "FMXLON", "FMXLAT", "FMEP0", "FMEP1"],
            'units': ["1", "1", "1", "degrees", "degrees", "1", "1"],
            'desc': [
                "1h Fuel Moisture Content", "10h Fuel Moisture Content",
                "100h Fuel Moisture Content", "Longitude for testing",
                "Latitude for testing",
                "Drying/Wetting Equilibrium Adjustment",
                "Rain Equilibrium Adjustment"
            ],
            'xlvl': 200100.,
            'nx': nx,
            'ny': ny,
            'iproj': 3,
            'startloc': startloc,
            'startlat': startlat,
            'startlon': startlon,
            'dx': index['dx'],
            'dy': index['dy'],
            'xlonc': index['stdlon'],
            'truelat1': index['truelat1'],
            'truelat2': index['truelat2'],
            'earth_radius': index['radius'],
            'is_wind_earth_rel': 0,
            'slab': arrs
        }
        WPSFormat.from_params(**params).to_file(
            osp.join(path, 'FMDA:{}'.format(time_tag)))
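
A hedged call sketch; only the keys dx, dy, stdlon, truelat1, truelat2 and radius are read from index here, and the projection values and time tag below are illustrative. `model`, `lats` and `lons` are as in the earlier sketches.

# hedged call sketch; projection values and the time tag are illustrative
index = {
    'dx': 1000.0, 'dy': 1000.0,
    'stdlon': -105.0, 'truelat1': 30.0, 'truelat2': 60.0,
    'radius': 6370000.0,
}
model.to_wps_format('wps_fmda', index, lats, lons, '2023-08-01_12')
# writes the intermediate file wps_fmda/FMDA:2023-08-01_12 with hdate 2023-08-01_12:00:00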
Example #10
    def to_geogrid(self, path, index, lats, lons):
        """
        Store model to geogrid files
        """
        test_latslons=True

        logging.info("fmda.fuel_moisture_model.to_geogrid path=%s lats %s lons %s" % (path, inq(lats), inq(lons)))
        logging.info("fmda.fuel_moisture_model.to_geogrid: geogrid_index="+str(index))
        ensure_dir(path)
        xsize, ysize, n = self.m_ext.shape
        if n != 5:
            logging.error('wrong number of extended state fields, expecting 5')
        x=int(xsize*0.5)
        y=int(ysize*0.5)
        index.update({'known_x':float(y),'known_y':float(x),'known_lat':lats[x-1,y-1],'known_lon':lons[x-1,y-1]})
        logging.info("fmda.fuel_moisture_model.to_geogrid: geogrid updated="+str(index))

        FMC_GC = np.zeros((xsize, ysize, 5))
        FMC_GC[:,:,:3] = self.m_ext[:,:,:3]
        if test_latslons:
            logging.info("fmda.fuel_moisture_model.to_geogrid: storing lons lats to FMC_GC(:,:,4:5) to test in WRF against XLONG and XLAT")
            FMC_GC[:,:,3] = lons
            FMC_GC[:,:,4] = lats
        FMEP = self.m_ext[:,:,3:]
        index['units']=addquotes('1')
        
        write_geogrid_var(path,'FMC_GC',FMC_GC,'1h, 10h, 100h fuel moisture',index,bits=32)
        write_geogrid_var(path,'FMEP',FMEP,'fuel moisture drying/wetting and rain equilibrium adjustments',index,bits=32)
Example #11
def write_geogrid_var(path_dir,varname,array,index,bits=32,coord=None):
    """
    write geogrid dataset and index 
    """
    path_dir=osp.abspath(path_dir)
    logging.info('write_geogrid_var path_dir=%s varname=%s array=%s index=%s' % (path_dir, varname, inq(array), str(index)))
    if not osp.exists(path_dir):
        os.makedirs(path_dir)

    # get information from src/geo/var_wisdom.py
    wisdom = get_wisdom(varname).copy()

    # write geogrid dataset
    geogrid_ds_path = osp.join(path_dir,varname)
    index['description'] = addquotes(wisdom.get('description',''))
    index['units'] = addquotes(wisdom.get('units',''))
    index['type'] = wisdom.get('type','continuous')
    index['signed'] = wisdom.get('signed','yes')
    bits = wisdom.get('bits',bits)
    scale = wisdom.get('scale',None)

    # some adds to index
    if 'category_range' in wisdom:
        index['category_min'] = wisdom['category_range'][0]
        index['category_max'] = wisdom['category_range'][1]
    if 'missing_value' in wisdom:
        index['missing_value'] = wisdom['missing_value']
    if 'tile_bdr' in wisdom:
        index['tile_bdr'] = wisdom['tile_bdr']

    # categorical substitution and interpolation
    if index['type'] == 'categorical':
        fill = wisdom.get('fill',{})
        array = fill_categories(array,fill,coord)

    write_geogrid(geogrid_ds_path,array,index,bits=bits,scale=scale)
 
    # write also the index as json entry to modify later
    index_json_path = osp.join(path_dir,'index.json')
    try:
        index_json = json.load(open(index_json_path,'r'))
    except:
        index_json = {}
    index_json[varname]=index
    json.dump(index_json,open(index_json_path,'w'), indent=4, separators=(',', ': ')) 

    geogrid_tbl_var = {'name': varname,
                   'dest_type': wisdom.get('type','continuous'),
                   'interp_option': wisdom.get('interp_option','default:average_gcell(4.0)+four_pt+average_4pt'),
                   'abs_path': geogrid_ds_path,
                   'priority': wisdom.get('priority',1)}

    # some adds to geogrid_tbl_var
    if 'fill_missing' in wisdom:
        geogrid_tbl_var['fill_missing'] = wisdom['fill_missing']
    if 'smooth_option' in wisdom:
        geogrid_tbl_var['smooth_option'] = wisdom['smooth_option']
    if 'subgrid' in wisdom:
        geogrid_tbl_var['subgrid'] = wisdom['subgrid']
    if 'add_opts' in wisdom:
        for key in wisdom['add_opts'].keys():
            geogrid_tbl_var[key] = wisdom['add_opts'][key]

    # write a segment of GEOGRID.TBL
    geogrid_tbl_path=osp.join(path_dir,'GEOGRID.TBL')
    write_table(geogrid_tbl_path, geogrid_tbl_var, mode='a', divider_after=True)

    # write also as json 
    geogrid_tbl_json_path = osp.join(path_dir,'geogrid_tbl.json')
    try:
        geogrid_tbl = json.load(open(geogrid_tbl_json_path,'r'))
    except:
        geogrid_tbl = {}
    geogrid_tbl[varname]=geogrid_tbl_var 
    json.dump(geogrid_tbl,open(geogrid_tbl_json_path,'w'), indent=4, separators=(',', ': ')) 
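
For orientation, a hypothetical wisdom entry restricted to the keys this function actually consumes; the real entries live in src/geo/var_wisdom.py and may differ.

# hypothetical wisdom entry; shows only the keys read above, values are illustrative
example_wisdom = {
    'description': '1h, 10h, 100h fuel moisture',
    'units': '1',
    'type': 'continuous',
    'signed': 'yes',
    'bits': 32,
    'scale': None,
    'interp_option': 'default:average_gcell(4.0)+four_pt+average_4pt',
    'priority': 1,
    # optional keys also honored here: category_range, missing_value, tile_bdr,
    # fill, fill_missing, smooth_option, subgrid, add_opts
}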
Example #12
def write_geogrid_var(path_dir, varname, array, index, bits=32, coord=None):
    """
    write geogrid dataset and index 
    """
    path_dir = osp.abspath(path_dir)
    logging.info('write_geogrid_var path_dir=%s varname=%s array=%s index=%s' %
                 (path_dir, varname, inq(array), str(index)))
    if not osp.exists(path_dir):
        os.makedirs(path_dir)

    # get information from src/geo/var_wisdom.py
    wisdom = get_wisdom(varname).copy()

    # write geogrid dataset
    geogrid_ds_path = osp.join(path_dir, varname)
    index['description'] = addquotes(wisdom.get('description', ''))
    index['units'] = addquotes(wisdom.get('units', ''))
    index['type'] = wisdom.get('type', 'continuous')
    index['signed'] = wisdom.get('signed', 'yes')
    bits = wisdom.get('bits', bits)
    scale = wisdom.get('scale', None)
    uscale = wisdom.get('unit_scale', None)

    # some adds to index
    if 'category_range' in wisdom:
        index['category_min'] = wisdom['category_range'][0]
        index['category_max'] = wisdom['category_range'][1]
    if 'missing_value' in wisdom:
        index['missing_value'] = wisdom['missing_value']
    if 'tile_bdr' in wisdom:
        index['tile_bdr'] = wisdom['tile_bdr']

    # categorical substitution and interpolation
    if index['type'] == 'categorical':
        fill = wisdom.get('fill', {})
        if isinstance(fill, str):
            fill_str = fill
            fill = {}
            if fill_str == 'from_file':
                fill_path = 'etc/vtables/fill.json'
                try:
                    fill_vars = json.load(open(fill_path, 'r'))
                except:
                    fill_vars = {}
                    logging.warning(
                        'write_geogrid_var fail reading fill file {}'.format(
                            fill_path))
                fill_file = fill_vars.get(varname, '')
                if osp.exists(fill_file):
                    try:
                        df = pd.read_csv(fill_file,
                                         names=['from', 'to'],
                                         index_col=False)
                        cfrom = np.array(df.loc[1:, 'from'])
                        cto = np.array(df.loc[1:, 'to'])
                        rest_val = df.loc[0, 'from']
                        unique = np.unique(array)
                        rest_ind = np.array(
                            [u for u in unique if u not in cfrom])
                        fill = Dict({tuple(rest_ind): rest_val})
                        for k, key in enumerate(cfrom):
                            fill.update({key: cto[k]})
                    except Exception as e:
                        logging.warning(
                            'write_geogrid_var fail reading fill CSV file {}'.
                            format(fill_file))
                        logging.warning('with exception {}'.format(e))
        array = fill_categories(array, fill, coord)

    write_geogrid(geogrid_ds_path,
                  array,
                  index,
                  bits=bits,
                  scale=scale,
                  uscale=uscale)

    # write also the index as json entry to modify later
    index_json_path = osp.join(path_dir, 'index.json')
    try:
        index_json = json.load(open(index_json_path, 'r'))
    except:
        index_json = {}
    index_json[varname] = index
    json.dump(index_json,
              open(index_json_path, 'w'),
              indent=4,
              separators=(',', ': '))

    geogrid_tbl_var = {
        'name': varname,
        'dest_type': wisdom.get('type', 'continuous'),
        'interp_option': wisdom.get(
            'interp_option', 'default:average_gcell(4.0)+four_pt+average_4pt'),
        'abs_path': geogrid_ds_path,
        'priority': wisdom.get('priority', 1)
    }

    # some adds to geogrid_tbl_var
    if 'fill_missing' in wisdom:
        geogrid_tbl_var['fill_missing'] = wisdom['fill_missing']
    if 'smooth_option' in wisdom:
        geogrid_tbl_var['smooth_option'] = wisdom['smooth_option']
    if 'subgrid' in wisdom:
        geogrid_tbl_var['subgrid'] = wisdom['subgrid']
    if 'add_opts' in wisdom:
        for key in wisdom['add_opts'].keys():
            geogrid_tbl_var[key] = wisdom['add_opts'][key]

    # write a segment of GEOGRID.TBL
    geogrid_tbl_path = osp.join(path_dir, 'GEOGRID.TBL')
    write_table(geogrid_tbl_path,
                geogrid_tbl_var,
                mode='a',
                divider_after=True)

    # write also as json
    geogrid_tbl_json_path = osp.join(path_dir, 'geogrid_tbl.json')
    try:
        geogrid_tbl = json.load(open(geogrid_tbl_json_path, 'r'))
    except:
        geogrid_tbl = {}
    geogrid_tbl[varname] = geogrid_tbl_var

    json.dump(geogrid_tbl,
              open(geogrid_tbl_json_path, 'w'),
              indent=4,
              separators=(',', ': '))
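
A hedged illustration of the CSV layout parsed by the 'from_file' branch above: two columns read as 'from' and 'to', where the first row supplies the default replacement for categories not listed and the remaining rows map individual categories.

# hedged illustration of the fill CSV parsing above; category values are made up
import io
import numpy as np
import pandas as pd

csv_text = """99,99
1,21
2,22
"""
df = pd.read_csv(io.StringIO(csv_text), names=['from', 'to'], index_col=False)
cfrom = np.array(df.loc[1:, 'from'])   # categories with an explicit mapping: [1, 2]
cto = np.array(df.loc[1:, 'to'])       # their replacements: [21, 22]
rest_val = df.loc[0, 'from']           # every unlisted category maps to 99
print(cfrom, cto, rest_val)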