Example #1
def new_scaled_netcdf(ncfile_out, nc_interpol, times_out,
                      t_unit, station_names=None):
    """
    Create netCDF file for scaled results (same for all reanalyses)
    Returns the file object so that kernel functions can
    successively write variables to it.

    """

    if path.isfile(ncfile_out):  # raise exception if file exists
        raise FileExistsError("File already exists: {}".format(ncfile_out))

    # make netCDF outfile, variables are written in kernels
    rootgrp = nc_new_file(ncfile_out, fmt='NETCDF4')
    rootgrp.source      = 'Reanalysis data interpolated and scaled to stations'

    # dimensions
    n_station = len(nc_interpol.variables['station'][:])
    station = rootgrp.createDimension('station', n_station)
    time    = rootgrp.createDimension('time', len(times_out))

    # base variables
    time           = ncvar_add_time(rootgrp, units=t_unit,
                                    calendar='gregorian', dimensions=('time'),
                                    dtype='i8')
    station        = ncvar_add_station(rootgrp)
    latitude       = ncvar_add_latitude(rootgrp)
    longitude      = ncvar_add_longitude(rootgrp)
    height         = ncvar_add_ellipsoid_height(rootgrp)

    crs           = rootgrp.createVariable('crs','i4')
    crs.long_name = 'coordinate system'
    crs.grid_mapping_name = 'latitude_longitude'
    crs.longitude_of_prime_meridian = 0.0
    crs.semi_major_axis = 6378137
    crs.inverse_flattening = 298.2572236
    crs.wkt = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'

    # assign base variables
    time[:]      = times_out
    station[:]   = nc_interpol.variables['station'][:]
    latitude[:]  = nc_interpol.variables['latitude'][:]
    longitude[:] = nc_interpol.variables['longitude'][:]
    height[:]    = nc_interpol.variables['height'][:]

    # add station names to netcdf
    if station_names is not None:
        # first convert to character array
        names_out = nc.stringtochar(np.array(station_names, 'S32'))

        # create space in the netcdf
        _            = rootgrp.createDimension('name_strlen', 32)
        st           = rootgrp.createVariable('station_name', "S1", ('station', 'name_strlen'))
        st.standard_name = 'platform_name'
        st.units     = ''

        # add data
        st[:] = names_out

    return rootgrp
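
For context, a minimal sketch of how a downstream kernel might use the returned file object. The variable name, attributes, and the air_temp array below are hypothetical illustrations, not part of the original source:

# hypothetical kernel body: rootgrp is the object returned by new_scaled_netcdf(),
# and air_temp is a (time, station) numpy array prepared elsewhere
var = rootgrp.createVariable('AIRT_C', 'f4', ('time', 'station'))
var.long_name = 'air temperature'
var.units = 'degrees_C'
var[:] = air_temp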
Example #2
def stringarray_to_chararray(arr, numchars=None):
    """
    Convert a string array to a character array with one extra dimension.

    Parameters
    ----------
    arr : array
        Array with numpy dtype 'SN', where N is the number of characters
        in the string.

    numchars : int
        Number of characters used to represent the string.  If numchars > N
        the results will be padded on the right with blanks.  The default,
        None will use N.

    Returns
    -------
    chararr : array
        Array with dtype 'S1' and shape = arr.shape + (numchars, ).

    """
    carr = netCDF4.stringtochar(arr)
    if numchars is None:
        return carr

    arr_numchars = carr.shape[-1]
    if numchars < arr_numchars:
        raise ValueError('numchars must be >= %i' % (arr_numchars))
    chararr = np.zeros(arr.shape + (numchars, ), dtype='S1')
    chararr[..., :arr_numchars] = carr[:]
    return chararr
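
A quick round-trip sketch of this helper; the sample data is hypothetical, and chartostring is the standard netCDF4 inverse of stringtochar:

import numpy as np
import netCDF4

arr = np.array(['foo', 'bar'], dtype='S3')
carr = stringarray_to_chararray(arr, numchars=8)
print(carr.shape)                  # (2, 8): one extra character dimension, padded
print(netCDF4.chartostring(carr))  # back to the original fixed-width strings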
Example #3
def write_to_file(filename, data, var, xtime):

  if os.path.isfile(filename):
    data_nc = netCDF4.Dataset(filename,'a', format='NETCDF3_64BIT_OFFSET')
  else:
    data_nc = netCDF4.Dataset(filename,'w', format='NETCDF3_64BIT_OFFSET')

    # Find dimensions
    ncells = data.shape[1]
    nsnaps = data.shape[0]

    # Declare dimensions
    data_nc.createDimension('nCells',ncells)
    data_nc.createDimension('StrLen',64)
    data_nc.createDimension('Time',None)

    # Create time variable    
    time = data_nc.createVariable('xtime','S1',('Time','StrLen')) 
    time[:] = netCDF4.stringtochar(xtime) 

  # Declare variables
  time = data_nc.dimensions['Time'].name
  ncells = data_nc.dimensions['nCells'].name
  data_var = data_nc.createVariable(var,np.float64,(time,ncells))

  # Set variables
  data_var[:,:] = data[:,:]
  data_nc.close()
Example #4
def write_collection(coll, nc_names, coll_properties, cra, nc):
    nc_names.instance_dim = INSTANCE_DIM
    coll.to_netcdf(nc, nc_names=nc_names, use_vlen=(not cra))
    string_id = nc_names.x_var[:(nc_names.x_var.find('_') + 1)]
    for pk, pv in coll_properties.items():
        vname = '{}{}'.format(string_id, pk)
        try:
            dtype = pv.dtype
        except AttributeError:
            dtype = type(pv[0])
        if dtype in (bytes, str):
            dtype = 'S1'
        if dtype == 'S1':
            pv = ['' if v is None else v for v in pv]
            max_char = len(max(pv, key=len))
            dim_name = vname + '_strlen'
            nc.createDimension(dim_name, max_char)
            var = nc.createVariable(vname,
                                    dtype,
                                    dimensions=(INSTANCE_DIM, dim_name))
            for i in range(len(var)):
                var[i] = stringtochar(np.array(pv[i], 'S' + str(max_char)))
        else:
            var = nc.createVariable(vname, dtype, dimensions=(INSTANCE_DIM, ))
            var[:] = pv
Example #5
 def define_forecasts(dataset, names, hours):
     dataset.createDimension("forecasts", len(names))
     var = dataset.createVariable("forecast_names", "c",
                                  ("forecasts", "string_length"))
     var[:] = netCDF4.stringtochar(np.array(names, dtype="S64"))
     var = dataset.createVariable("forecasts", "f", ("forecasts", ))
     var[:] = hours
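
Note that define_forecasts assumes a "string_length" dimension already exists (the define function in Example #7 creates one of length 64, matching the 'S64' dtype). A minimal setup sketch, with a hypothetical file name:

import numpy as np
import netCDF4

dataset = netCDF4.Dataset("forecasts.nc", "w")
dataset.createDimension("string_length", 64)  # must match the 'S64' dtype above
define_forecasts(dataset, ["gfs", "ecmwf"], [0.0, 6.0])
dataset.close()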
Example #6
def writeNC_dims(fn, scalarv, midSoil, midToto, ifcToto, hrus, hru_type):
    """ Write <vars>[hru] array in netCDF4 file,<fn> and variable of
        <varname> """

    print "writing output file"
    nc_out = nc4.Dataset(fn, 'w', format='NETCDF4')

    # Create dimensions
    dim_hru = nc_out.createDimension('hru', len(hrus))
    dim_scalarv = nc_out.createDimension('scalarv', scalarv)
    dim_midSoil = nc_out.createDimension('midSoil', midSoil)
    dim_midToto = nc_out.createDimension('midToto', midToto)
    dim_ifcToto = nc_out.createDimension('ifcToto', ifcToto)

    # --- Create HRU ID variable (can be either int or string)
    if hru_type == 'str':
        # string HRU (need to add string length)
        max_strlen = 20  # EC
        dim_str = nc_out.createDimension('strlen', max_strlen)
        hruId = nc_out.createVariable('hruId', 'S1', ('hru', 'strlen'))
        hruId[:] = nc4.stringtochar(
            np.asarray(hrus, dtype='S{}'.format(max_strlen)))
    else:
        # integer HRU
        hruId = nc_out.createVariable('hruId', 'i8', ('hru', ))  # edited EC
        hruId[:] = np.asarray(hrus, dtype='int')

    # add attribute
    hruId.long_name = 'USGS HUC12 ID'

    return nc_out
Example #7
 def define(dataset, metrics, areas, times, units):
     dataset.createDimension("time", len(times))
     dataset.createDimension("surface", 1)
     dataset.createDimension("metrics", len(metrics))
     dataset.createDimension("areas", len(areas))
     dataset.createDimension("string_length", 64)
     var = dataset.createVariable("metric_names", "c",
                                  ("metrics", "string_length"))
     var[:] = netCDF4.stringtochar(np.array(metrics, dtype="S64"))
     var = dataset.createVariable("area_names", "c",
                                  ("areas", "string_length"))
     var[:] = netCDF4.stringtochar(np.array(areas, dtype="S64"))
     var = dataset.createVariable("time", "d", ("time", ))
     var.units = units
     if len(times) > 0:
         var[:] = netCDF4.date2num(times, units=units)
Example #8
def writeNC_dims(fn, hrus, hru_type):
    """ Write <vars>[hru] array in netCDF4 file,<fn> and variable of
        <varname> """

    print "starting output file"
    nc_out = nc4.Dataset(fn, 'w', format='NETCDF4')

    # Create dimensions
    #dim_hru = nc_out.createDimension('hru', len(hrus))
    dim_gru = nc_out.createDimension('gru', len(hrus))

    # --- Create HRU and GRU ID variables (can be either int or string)
    # --- currently these are all the same
    if hru_type == 'str':
        # string HRU (need to add string length)
        max_strlen = 20
        dim_str = nc_out.createDimension('strlen', max_strlen)
        gruId = nc_out.createVariable('gruId', 'S1', ('gru', 'strlen'))
        gruId[:] = nc4.stringtochar(
            np.asarray(hrus, dtype='S{}'.format(max_strlen)))
    else:
        gruId = nc_out.createVariable('gruId', 'i8', ('gru', ))
        gruId[:] = np.asarray(hrus, dtype='int')

    # add attributes
    gruId.long_name = 'ID of group of response unit (GRU) -- USGS HUC12 ID'

    return nc_out
Example #9
def stringarray_to_chararray(arr, numchars=None):
    """
    Convert a string array to a character array with one extra dimension.

    Parameters
    ----------
    arr : array
        Array with numpy dtype 'SN', where N is the number of characters
        in the string.

    numchars : int
        Number of characters used to represent the string.  If numchars > N
        the results will be padded on the right with blanks.  The default,
        None will use N.

    Returns
    -------
    chararr : array
        Array with dtype 'S1' and shape = arr.shape + (numchars, ).

    """
    carr = netCDF4.stringtochar(arr)
    if numchars is None:
        return carr

    arr_numchars = carr.shape[-1]
    if numchars < arr_numchars:
        raise ValueError('numchars must be >= %i' % (arr_numchars))
    chararr = np.zeros(arr.shape + (numchars, ), dtype='S1')
    chararr[..., :arr_numchars] = carr[:]
    return chararr
Example #10
def write_out_fixed_forcing(filenameOut, nx, ny, ugx, ugy):

    fileOut = Dataset(filenameOut, "w", format="NETCDF3_CLASSIC")

    fileOut.climatology = 1

    fileOut.createDimension("strLen", 64)
    fileOut.createDimension("Time", 1)
    fileOut.createDimension("nx", nx)
    fileOut.createDimension("ny", ny)

    var = fileOut.createVariable("Time", "c", dimensions=["Time", "strLen"])

    year = 0
    month = 1
    day = 1
    hour = 0
    minute = 0
    second = 0

    timeStr = "%4.4i-%2.2i-%2.2i_%2.2i:%2.2i:%2.2i" % (year, month, day, hour,
                                                       minute, second)
    var[0, 0:19] = netCDF4.stringtochar(np.array(timeStr, 'S19'))

    varx = fileOut.createVariable("xAtmWind",
                                  "d",
                                  dimensions=["Time", "nx", "ny"])
    vary = fileOut.createVariable("yAtmWind",
                                  "d",
                                  dimensions=["Time", "nx", "ny"])

    varx[0, :, :] = ugx[:, :]
    vary[0, :, :] = ugy[:, :]

    fileOut.close()
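
For completeness, a sketch of reading the char-encoded Time string back out of the file written above; chartostring is the standard inverse of stringtochar (everything else here is assumed, not from the original source):

from netCDF4 import Dataset, chartostring

fileIn = Dataset(filenameOut, "r")
fileIn.set_auto_mask(False)  # plain numpy char array, so chartostring can consume it
timeStrs = chartostring(fileIn.variables["Time"][:])  # char array -> string array
print(timeStrs[0])  # the 19-character timestamp plus trailing fill
fileIn.close()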
Example #11
def main():

  # Command line arguments
  parser = argparse.ArgumentParser(description=
       'Changes the value of a named char variable in a netcdf file.',
       epilog='Written by A.Adcroft, 2013.')
  parser.add_argument('filename', type=str,
                      help='netcdf file to modify.')
  parser.add_argument('variable', type=str,
                      help='Name of char variable to change.')
  parser.add_argument('value', type=str,
                      help='Contents to change string to.')

  optCmdLineArgs = parser.parse_args()

  rg = Dataset(optCmdLineArgs.filename, 'a')
  if optCmdLineArgs.variable in rg.variables:
    var = rg.variables[optCmdLineArgs.variable]
    dat = np.empty(1,'S'+repr(len(var)))
    dat[0] = optCmdLineArgs.value
    dc = stringtochar(dat)
    var[:] = dc
  else: error('"'+optCmdLineArgs.variable+'" was not found in "'+optCmdLineArgs.filename+'".')
  rg.close()
  print('File "%s" updated.' % (optCmdLineArgs.filename))
Example #12
def main():

    # Command line arguments
    parser = argparse.ArgumentParser(
        description=
        'Changes the value of a named char variable in a netcdf file.',
        epilog='Written by A.Adcroft, 2013.')
    parser.add_argument('filename', type=str, help='netcdf file to modify.')
    parser.add_argument('variable',
                        type=str,
                        help='Name of char variable to change.')
    parser.add_argument('value',
                        type=str,
                        help='Contents to change string to.')

    optCmdLineArgs = parser.parse_args()

    rg = Dataset(optCmdLineArgs.filename, 'a')
    if optCmdLineArgs.variable in rg.variables:
        var = rg.variables[optCmdLineArgs.variable]
        dat = np.empty(1, 'S' + repr(len(var)))
        dat[0] = optCmdLineArgs.value
        dc = stringtochar(dat)
        var[:] = dc
    else:
        error('"' + optCmdLineArgs.variable + '" was not found in "' +
              optCmdLineArgs.filename + '".')
    rg.close()
    print('File "%s" updated.' % (optCmdLineArgs.filename))
Example #13
def writeNetCDFData(fn, prate, times, hrus):
    """ Write <vars>[time,hru] array in netCDF4 file,<fn> and variable of <varname> """
    ncfile = nc4.Dataset(fn,'w',format='NETCDF4')
  
    # Data should be 2D [time x hru]
    dim2size = prate.shape[1]
   
    dim_1 = ncfile.createDimension('time', None)  # time axis - record dimension
    dim_2 = ncfile.createDimension('hru', dim2size)  # hru axis
    max_strlen = 12  # EAC
    dim_3 = ncfile.createDimension('strlen', max_strlen) # string length for hrus, EAC added
  
    # Define dimensions variable to hold the var
    t = ncfile.createVariable('time', 'i4', ('time', ))
    hru = ncfile.createVariable('hru', 'S1', ('hru', 'strlen'))   # edited EAC
  
    # Define 2D variables to hold the data
   # AWW:  changed variable name to match use for cfsv2
    map = ncfile.createVariable('prate','f4',('time','hru',))  
   # tavg_2m = ncfile.createVariable('tmp_2m','f4',('time','hru',))  

   
    # Populate netcdf file variables
    t[:]      = times   # NM added
    hru[:]    = nc4.stringtochar(np.asarray(hrus, dtype='S{}'.format(max_strlen))) # EAC
    map[:,:]  = prate   # for mean area precip (map), etc.
   # tavg_2m[:,:] = tmp_2m
  
    # Attributes (should take more from input netcdf)
    # set time axis
    t.time_origin = '1970-JAN-01 00:00:00'
    t.title = 'Time'
    t.long_name = 'Time axis'
    t.units = 'seconds since 1970-01-01 00:00:00'
    t.calendar = 'Gregorian'
    t.axis = 'T'
    # set hru axis
    hru.title = 'HRU ID'
    hru.long_name = 'HRU axis'
    hru.axis = 'X'
  
    # set variable attributes

    # AW - commented out vars above and used temp_2m
    map.associate = 'time hru'
    map.units = 'kg/m^2/s'
    map.setMissing = '1.e+20'
    map.axis = 'TX'
    
    #tavg_2m.associate = 'time hru'  
    #tavg_2m.units = 'degrees C'
    #tavg_2m.setMissing = '1.e+20'
    #tavg_2m.axis = 'TX'
  
    # Write basic global attribute
    ncfile.history = 'Created ' + time.ctime(time.time())
    ncfile.source = os.path.dirname(os.path.abspath(__file__))+__file__[1:]
    
    ncfile.close()
Example #14
def str_list_to_char_arr(slist, maxlen) :
#---------------------------------------------------------------------------------------------------
   """
   Convert a list of regular python strings to a numpy character array of type '|S1', which is what
   is required by the netCDF4 module. The maximum length of each string in the output netcdf array
   is defined by maxlen. It's usually the last dimension in the variable declaration.
   """
   stype = 'S%d' % maxlen
   tarr = np.array(slist, dtype=stype)
   return nc4.stringtochar(tarr)
Example #15
def str_list_to_char_arr(slist, maxlen):
    #---------------------------------------------------------------------------------------------------
    """
    Convert a list of regular python strings to a numpy character array of type '|S1', which is what
    is required by the netCDF4 module. The maximum length of each string in the output netcdf array
    is defined by maxlen. It's usually the last dimension in the variable declaration.
    """
    stype = 'S%d' % maxlen
    tarr = np.array(slist, dtype=stype)
    return nc4.stringtochar(tarr)
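
A small usage sketch, assuming the same module-level imports (np, nc4) as the function itself; the names are hypothetical:

names = ['alpha', 'beta']
carr = str_list_to_char_arr(names, maxlen=10)
print(carr.shape)  # (2, 10): fits a variable declared on ('station', 'strlen') style dimensions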
Example #16
 def setUp(self):
     self.file = FILE_NAME
     nc = Dataset(FILE_NAME, 'w', format=FILE_FORMAT)
     nc.createDimension('n1', None)
     nc.createDimension('n2', n2)
     nc.createDimension('nchar', nchar)
     v = nc.createVariable('strings', 'S1', ('n1', 'n2', 'nchar'))
     for nrec in range(nrecs):
         datac = stringtochar(data)
         v[nrec] = datac[nrec]
     nc.close()
Example #17
 def setUp(self):
     self.file = FILE_NAME
     nc = Dataset(FILE_NAME,'w',format=FILE_FORMAT)
     nc.createDimension('n1',None)
     nc.createDimension('n2',n2)
     nc.createDimension('nchar',nchar)
     v = nc.createVariable('strings','S1',('n1','n2','nchar'))
     for nrec in range(nrecs):
         datac = stringtochar(data)
         v[nrec] = datac[nrec]
     nc.close()
Example #18
 def setUp(self):
     self.file = FILE_NAME
     nc = Dataset(FILE_NAME, "w", format=FILE_FORMAT)
     nc.createDimension("n1", None)
     nc.createDimension("n2", n2)
     nc.createDimension("nchar", nchar)
     v = nc.createVariable("strings", "S1", ("n1", "n2", "nchar"))
     for nrec in range(nrecs):
         datac = stringtochar(data)
         v[nrec] = datac[nrec]
     nc.close()
Example #19
    def __setitem__(self, idx, value):
        attributes = self.attributes
        type = attributes['ftype']
        if type == "M": value = value.astype("datetime64[ms]").astype("f8")
        if type == 'S1':
            if not self.isChar:
                value = stringtochar(
                    np.array(value).astype("S{}".format(
                        self.variable.shape[1])))

        value = setT(self.tattributes, value)
        self.variable.__setitem__(idx, value)
Example #20
 def setstr(self, k, s, dim=128):
     name = self[k]
     nc = self.nc_node
     if not isinstance(dim, str) or dim is None:
         if dim is None:
             dim = len(s)
         dim_name = f'{name}_slen'
         nc.createDimension(dim_name, dim)
     else:
         dim_name = dim
     v = nc.createVariable(name, 'S1', (dim_name, ))
     v[:] = netCDF4.stringtochar(np.array([s], dtype=f'S{dim}'))
     return v
Example #21
def writeNC_trialParam(fn, var_out, varname, attNames, attContents, hrus, hru_type):
    """ Write <vars>[hru] array in netCDF4 file,<fn> and variable of
        <varname> """

    print "writing output file"
    ncfile = nc4.Dataset(fn, 'w', format='NETCDF4')

    # Data should be 1D [hru]
    dimSize = var_out.shape[0]
    dim_1 = ncfile.createDimension('hru', dimSize)  # hru axis

    # ==== Populate netcdf file variables =====
    
    # --- Create HRU ID variable (can be either int or string)
    if hru_type == 'str':
        # string HRU (need to add string length)
        max_strlen = 20  # EC
        dim_str = ncfile.createDimension('strlen', max_strlen)
        hruId = ncfile.createVariable('hruID', 'S1', ('hru', 'strlen'))  
        hruId[:] = nc4.stringtochar(np.asarray(hrus,
                                  dtype='S{}'.format(max_strlen)))         
    else:
        # integer HRU
        hruId = ncfile.createVariable('hruID', 'i8', ('hru', ))   # edited EC
        hruId[:] = np.asarray(hrus, dtype='int')
        #hruId[:] = np.asarray(hrus, dtype='i8')        # AW for HUCs
    # add attribute    
    hruId.long_name = 'USGS HUC12 ID'
        
    # geophysical var data and attributes from input file
    print "adding data & attributes"
    ncvar = ncfile.createVariable(varname, var_out.dtype, ('hru',), fill_value=-999.0)
    ncvar[:] = var_out   # store mean areal data in netcdf file
    # for i in range(len(attNames)):
        # ncvar.setncatts({attNames[i]: attContents[i]})

    # -- from another script
    # Copy the variable attributes
    # dsout.variables[v_name].setncatts({k: varin.getncattr(k) for k in varin.ncattrs()})
    # Create the variables in the file
    #for v_name, varin in dsin.variables.iteritems():
    #    dsout.createVariable(v_name, varin.datatype, varin.dimensions)
    # Copy the variables values
    #dsout.variables[v_name][:] = uuu[:]
    # -- end ... could be useful syntax --

    # Write basic global attribute
    ncfile.history = 'Created ' + time.ctime(time.time())
    ncfile.source = os.path.dirname(os.path.abspath(__file__))+__file__[1:]

    ncfile.close()
Example #22
def _create_ncvar(dic, dataset, name, dimensions):
    """
    Create and fill a Variable in a netCDF Dataset object.

    Parameters
    ----------
    dic : dict
        Radar dictionary to containing variable data and meta-data
    dataset : Dataset
        NetCDF dataset to create variable in.
    name : str
        Name of variable to create.
    dimension : tuple of str
        Dimension of variable.

    """
    # create array from list, etc.
    data = dic['data']
    if isinstance(data, np.ndarray) is not True:
        print "Warning, converting non-array to array:", name
        data = np.array(data)

    # convert string array to character arrays
    if data.dtype.char == 'S' and data.dtype != 'S1':
        import netCDF4
        data = netCDF4.stringtochar(data)

    # create the dataset variable
    if 'least_significant_digit' in dic:
        lsd = dic['least_significant_digit']
    else:
        lsd = None
    if "_FillValue" in dic:
        fill_value = dic['_FillValue']
    else:
        fill_value = None

    ncvar = dataset.createVariable(name, data.dtype, dimensions,
                                   zlib=True, least_significant_digit=lsd,
                                   fill_value=fill_value)

    # set all attributes
    for key, value in dic.items():
        if key not in ['data', '_FillValue']:
            ncvar.setncattr(key, value)

    # set the data
    if data.shape == ():
        data.shape = (1,)
    ncvar[:] = data[:]
Example #23
 def setUp(self):
     self.file = FILE_NAME
     nc = Dataset(FILE_NAME,'w',format=FILE_FORMAT)
     nc.createDimension('n1',None)
     nc.createDimension('n2',n2)
     nc.createDimension('nchar',nchar)
     v = nc.createVariable('strings','S1',('n1','n2','nchar'))
     v2 = nc.createVariable('strings2','S1',('n1','n2','nchar'))
     # if _Encoding set, string array should automatically be converted
     # to a char array and vice-versa
     v2._Encoding = 'ascii'
     v3 = nc.createVariable('strings3','S1',('n1','n2','nchar'))
     v3._Encoding = 'ascii'
     for nrec in range(nrecs):
         datac = stringtochar(data,encoding='ascii')
         v[nrec] = datac[nrec]
     v2[:-1] = data[:-1]
     v2[-1] = data[-1]
     v2[-1,-1] = data[-1,-1] # write single element
     v2[-1,-1] = data[-1,-1].tobytes() # write single python string
     # _Encoding should be ignored if an array of characters is specified
     v3[:] = stringtochar(data, encoding='ascii')
     nc.close()
Example #24
 def setUp(self):
     self.file = FILE_NAME
     nc = Dataset(FILE_NAME, 'w', format=FILE_FORMAT)
     nc.createDimension('n1', None)
     nc.createDimension('n2', n2)
     nc.createDimension('nchar', nchar)
     v = nc.createVariable('strings', 'S1', ('n1', 'n2', 'nchar'))
     v2 = nc.createVariable('strings2', 'S1', ('n1', 'n2', 'nchar'))
     # if _Encoding set, string array should automatically be converted
     # to a char array and vice-versa
     v2._Encoding = 'ascii'
     v3 = nc.createVariable('strings3', 'S1', ('n1', 'n2', 'nchar'))
     v3._Encoding = 'ascii'
     for nrec in range(nrecs):
         datac = stringtochar(data, encoding='ascii')
         v[nrec] = datac[nrec]
     v2[:-1] = data[:-1]
     v2[-1] = data[-1]
     v2[-1, -1] = data[-1, -1]  # write single element
     v2[-1, -1] = data[-1, -1].tobytes()  # write single python string
     # _Encoding should be ignored if an array of characters is specified
     v3[:] = stringtochar(data, encoding='ascii')
     nc.close()
Example #25
 def create_header(self, specobj):
     for varname, attrname in header_variables.items():
         #print varname
         if varname == 'Header.Mode.Bitcode':
             bcode = getattr(specobj.mode, attrname)
             self.nc.variables[
                 'Header.Mode.Bitcode'][:len(bcode)] = netCDF4.stringtochar(
                     numpy.array([bcode]))
         elif varname == 'Header.Mode.roach_num':
             self.nc.variables[varname][:] = getattr(specobj, attrname)
         elif varname == 'Header.Telescope.source_name':
             sname = specobj.source_name
             self.nc.variables['Header.Telescope.source_name'][:len(
                 sname)] = netCDF4.stringtochar(numpy.array([sname]))
         elif varname == 'Header.Telescope.obspgm':
             obspgm = specobj.obspgm
             self.nc.variables['Header.Telescope.obspgm'][:len(
                 obspgm)] = netCDF4.stringtochar(numpy.array([obspgm]))
         elif varname in ('Header.Telescope.ObsNum',
                          'Header.Telescope.SubObsNum',
                          'Header.Telescope.ScanNum'):
             self.nc.variables[varname][:] = getattr(specobj, attrname)
         else:
             self.nc.variables[varname][:] = getattr(specobj.mode, attrname)
Example #26
def addImage2netcdf(image, ncFile, granule, imgTime):

    dataset = nc.Dataset(ncFile, 'a')

    ### Updating time
    time = dataset.variables['time']
    name = dataset.variables['granule']
    data = dataset.variables['image']
    numGranules = time.shape[0]
    time[numGranules] = nc.date2num(imgTime,
                                    units=time.units,
                                    calendar=time.calendar)
    name[numGranules] = nc.stringtochar(np.array(granule, 'S100'))
    data[numGranules, :, :] = image

    dataset.close()
Example #27
    def __setitem__(self, idx, value):
        """
    Set data based on the query
    """

        vname, idx = self.__checkVariable(idx)
        if self.variables[vname]['type'] == "S":
            value = stringtochar(
                np.array(value).astype("S{}".format(self.shape[1])))

        # if self.variables[vname]['type']!="S":
        value = checkValue(value, idx, self.shape)

        if isQuickSet(idx, self.ndata, self.master, self.child):
            self._quickSet(vname, self._getPartIndex(idx), value)
        else:
            self.__subrun(vname, idx, value=value)
Example #28
def strlist_to_netcdf(a, dim=None):
    """Convert a list of strings to a netCDF4 chararray.

    Arguments:
        a: a list of strings
        dim (optional): netCDF4 dimensions

    Returns:
        netCDF4 chararray
    """
    if dim is None:
        dim = (len(a), max(len(s) for s in a))
    else:
        dim = tuple(len(d) for d in dim)
    # build a fixed-width bytes array; stringtochar then adds the char dimension
    arr = np.array([s.ljust(dim[1]) for s in a], dtype='S{}'.format(dim[1]))
    return netCDF4.stringtochar(arr)
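
Usage sketch, assuming numpy as np and netCDF4 imported as in the function body:

carr = strlist_to_netcdf(['foo', 'quux'])
print(carr.shape)  # (2, 4): one row per string, padded to the longest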
Example #29
    def OpenO(self, nc_file_name):
        """Opens NetCDF file for output.
    
    Opens a NetCDF file of nc_file_name for writing (output).

    Parameters
    ----------
    nc_file_name : str
      Name of netCDF file you wish to write to.

    """

        if self.__openI:
            print('ERROR: Cannot set input and output in same instance')
            sys.exit(-1)
        if self.__openO:
            print('ERROR: Cannot open file twice!')
            sys.exit(-1)

        # Conventions
        self.__nc_file = Dataset(nc_file_name, 'w', format='NETCDF4')
        self.__nc_file.Conventions = 'AMBER'
        self.__nc_file.ConventionVersion = '1.0'
        self.__nc_file.program = 'LammpsPy.py'
        self.__nc_file.programVersion = '1.0'

        # Dimensions except number of particles
        self.__nc_file.createDimension('frame', 0)
        self.__nc_file.createDimension('spatial', 3)
        self.__nc_file.createDimension('cell_spatial', 3)
        self.__nc_file.createDimension('cell_angular', 3)
        self.__nc_file.createDimension('label', 5)

        # Preliminary variables
        char_sp_var = self.__nc_file.createVariable('char spatial', 'S1',
                                                    ('spatial'))
        char_sp_var[:] = ['x', 'y', 'z']
        char_csp_var = self.__nc_file.createVariable('char cell_spatial', 'S1',
                                                     ('cell_spatial'))
        char_csp_var[:] = ['a', 'b', 'c']
        char_sp_var = self.__nc_file.createVariable('char cell_angular', 'S1',
                                                    ('cell_angular', 'label'))
        char_sp_var[:] = stringtochar(np.array(['alpha', 'beta', 'gamma']))

        # notes OpenO has been run
        self.__openO = True
Example #30
def GetDim(NCData,var,shape,DimDict,i,istime):
	output=[]
	#grab type
	try:
		val_type=str(var.dtype)
	except AttributeError:
		val_type=type(var)
	#grab dimension
	for dim in range(0,i): #loop on the dimensions
		if type(shape[0])==int: 
			try:
				output=output+[str(DimDict[shape[dim]])] # test if the dimension already exists
			except KeyError: # if not, create it
				if (shape[dim])>1:
					index=len(DimDict)+1
					NewDim=NCData.createDimension('Dimension'+str(index),(shape[dim]))
					DimDict[len(NewDim)]='Dimension'+str(index)
					output=output+[str(DimDict[shape[dim]])]
					print('Defining dimension ' +'Dimension'+str(index))
		elif type(shape[0])==str: # dealing with a dictionary
			try:
				output=[str(DimDict[numpy.shape(shape)[0]])]+['DictDim']
			except KeyError:
				index=len(DimDict)+1
				NewDim=NCData.createDimension('Dimension'+str(index),numpy.shape(shape)[0])
				DimDict[len(NewDim)]='Dimension'+str(index)
				output=[str(DimDict[numpy.shape(dict.keys(var))[0]])]+['Dimension6']
				print('Defining dimension ' +'Dimension'+str(index))
			break
	if istime:
		output=output+['Dimension4']
	# dealing with char and not string as we should, so we need to add a string length
	if val_type=='bool' or val_type==collections.OrderedDict or val_type==list:
		charvar=stringtochar(numpy.array(var))
		stringlength=charvar.shape[charvar.ndim-1]
		try:
			output=output+[str(DimDict[stringlength])] # test if the dimension already exists
		except KeyError: # if not, create it
			if (shape[dim])>1:
				index=len(DimDict)+1
				NewDim=NCData.createDimension('Dimension'+str(index),(stringlength))
				DimDict[len(NewDim)]='Dimension'+str(index)
				output=output+[str(DimDict[stringlength])]
				print('Defining dimension ' +'Dimension'+str(index))
	return tuple(output), DimDict
Example #31
def create_netcdf_label():
    execute_create_netcdf_label_start_time = datetime.datetime.now()
    # extract one hot encoding column
    labels = [col[0] for col in listOfDiacritizedCharacter]

    # convert unicode to string
    labels = [str(x) for x in labels]

    # convert array of string to array of char to be compatible with NETCDF
    # you will find strange values, but do not worry, it will exported correctly
    labels = netcdf_helpers.stringtochar(np.array(labels))

    global purified_labels
    purified_labels = []
    purified_labels = labels

    execute_create_netcdf_label_end_time = datetime.datetime.now()
    print "create_netcdf_label takes : ", execute_create_netcdf_label_end_time - execute_create_netcdf_label_start_time
Example #32
    def process(self):
        """
        Run all relevant processes and save data. Each kernel processes one
        variable and adds it to the netCDF file.
        """

        if not path.isdir(path.dirname(self.output_file)):
            makedirs(path.dirname(self.output_file))

        self.rg = new_scaled_netcdf(self.output_file,
                                    self.nc_pl,
                                    self.times_out_nc,
                                    t_unit=self.scaled_t_units)

        # add station names to netcdf
        # first convert to character array
        names_out = nc.stringtochar(
            np.array(self.stations['station_name'], 'S32'))

        # create space in the netcdf
        _ = self.rg.createDimension('name_strlen', 32)
        st = self.rg.createVariable('station_name', "S1",
                                    ('station', 'name_strlen'))
        st.standard_name = 'platform_name'
        st.units = ''

        # add data
        st[:] = names_out

        # iterate through kernels and start process
        for kernel_name in self.kernels:
            if hasattr(self, kernel_name):
                print(kernel_name)
                getattr(self, kernel_name)()
            else:
                logger.error(f"Missing kernel {kernel_name}")

        # self.conv_geotop()

        # close netCDF files
        self.rg.close()
        self.nc_pl.close()
        self.nc_sf.close()
        self.nc_sa.close()
Example #33
def save_string_list(group, membername, stringarray, dimensionname):

    # Compute length of strings
    max_length = max([ len(element) if (element is not None) else 0 for element in stringarray ]) + 1

    # Name used to store length of strings
    lengthname = STRING_LENGTH_DIMENSION_FORMAT.format(membername)

    # Build new variable
    group.createDimension(lengthname, max_length)
    group.createVariable(membername, 'S1', [dimensionname, lengthname])

    # Get rid of the behaviour in which None elements map to the string 'None'
    # (we want them just to be empty strings)
    stringarray_filtered = [ s if (s is not None ) else '' for s in stringarray ]
    
    # Convert to NumPy at max_length (which includes a null terminator)
    stringarray_numpy = numpy.array(stringarray_filtered, dtype=('S' + str(max_length)))

    # Set in output
    group.variables[membername][:] = stringtochar(stringarray_numpy)
Example #34
    def AddVariable(self,
                    name,
                    dimnames,
                    values,
                    dtype='f4',
                    attr={},
                    indent=''):
        """
        Add variable with data.
        
        Optional arguments:
          dtype     : 'f4' | 'i4' | ...
                      if not defined, value.dtype is used
          attr      : optional attributes
          indent    : specifiy whitespace to indent logging messages
        """

        # modules:
        import logging

        # info ...
        logging.debug(indent + 'add variable "%s" ...' % name)

        # data type:
        if dtype is None: dtype = values.dtype

        # create::
        varid = self.ncid.createVariable(name, dtype, dimnames)
        # fill data:
        if values is not None:
            if dtype == 'S1':
                varid[:] = netCDF4.stringtochar(values)
            else:
                varid[:] = values
            #endif
        #endif

        # attributes:
        for key in attr.keys():
            varid.setncattr(key, attr[key])
Example #35
 def addvar(self, tag, typ, dim, data=None, attributes=None):
     thistype = {'float': 'f', 'double': 'd', 'char': 'S1', 'int': 'i'}[typ]
     if dim is None:
         vlvar = self.nc.createVariable(tag, thistype)
     else:
         if isinstance(dim, tuple):
             vlvar = self.nc.createVariable(tag, thistype, dim)
         else:
             vlvar = self.nc.createVariable(tag, thistype, (dim, ))
     if attributes is not None:
         for k in attributes:
             vlvar.setncattr(k, attributes[k])
     if data is not None:
         if typ == 'char':
             print(dim)
             print(data)
             x = numpy.chararray(self.dimensions[dim[0]],
                                 itemsize=self.dimensions[dim[1]])
             x[:] = data
             self.nc.variables[tag][:] = netCDF4.stringtochar(x)
         else:
             self.nc.variables[tag][:] = data
Example #36
def write_means_nc(configdict, RMSE, MAE, counts):
    # first check to see if file exists
    exists = os.path.isfile(configdict['outfile'])
    if not exists:
        with nc.Dataset(configdict['outfile'], mode='w') as outfile:
            # create dimensions and variables
            time_dim = outfile.createDimension("time", None)
            str_dim = outfile.createDimension("timestrlen", 10)
            timestamp = outfile.createVariable("timestamp", "S1",
                                               ("time", "timestrlen"))
            timestamp.long_name = "valid time in YYYYMMDDHH"
            for vname in configdict['variables']:
                varstr = vname + '_RMSE'
                outvar1 = outfile.createVariable(varstr, "f4", ("time"))
                varstr = vname + '_MAE'
                outvar2 = outfile.createVariable(varstr, "f4", ("time"))
                varstr = vname + '_counts'
                outvar3 = outfile.createVariable(varstr, "i4", ("time"))
    with nc.Dataset(configdict['outfile'], mode='a') as outfile:
        # determine index in unlimited dimension
        timestamp = outfile.variables['timestamp']
        idx = timestamp.shape[0]
        # write out data to file
        tmpstr = np.array(configdict['timestamp'].strftime('%Y%m%d%H'),
                          dtype="S10")
        timestamp[idx,:] = nc.stringtochar(tmpstr)
        for vname in configdict['variables']:
            varstr = vname + '_RMSE'
            outvar = outfile.variables[varstr]
            outvar[idx] = RMSE[vname]
            varstr = vname + '_MAE'
            outvar = outfile.variables[varstr]
            outvar[idx] = MAE[vname]
            varstr = vname + '_counts'
            outvar = outfile.variables[varstr]
            outvar[idx] = counts[vname]
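
A sketch of reading the appended records back out of the same file; chartostring reverses the stringtochar encoding used above:

import netCDF4 as nc

with nc.Dataset(configdict['outfile']) as f:
    f.set_auto_mask(False)  # plain numpy char array, so chartostring can consume it
    stamps = nc.chartostring(f.variables['timestamp'][:])
    print(stamps)  # one YYYYMMDDHH byte string per record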
Example #37
def SaveFile(filename, y, lon, lat, names):
    """ Saves y into netcdf file under filename in datadirectory """
    datadir = GetDir()

    savefile = datadir + filename + '.nc'
    print(savefile)
    # Copy dimension and information for meta data
    dataset = netCDF4.Dataset(savefile, 'w', format='NETCDF4_CLASSIC')

    print(dataset)
    # create dimensions
    lonlat = dataset.createDimension('lonlat', len(lon) * len(lat))
    strlen = dataset.createDimension('strlen', None)

    samples = dataset.createDimension('samples', len(names))
    # create variables
    samples_list = dataset.createVariable('samples_list', 'S1',
                                          ('samples', 'strlen'))
    sample_str = netCDF4.stringtochar(np.array(names))
    samples_list[:] = sample_str

    y_pred = dataset.createVariable('y_pred', np.float64,
                                    ('samples', 'lonlat'))
    y_pred[:, :] = y

    # close the file so the data is flushed to disk
    dataset.close()
Example #38
def write_out_time_varying_forcing(filenameOut, nx, ny, ugx, ugy):

    # time varying forcing
    fileOut = Dataset(filenameOut, "w", format="NETCDF3_CLASSIC")

    fileOut.climatology = 0

    fileOut.createDimension("strLen", 64)
    fileOut.createDimension("Time", 10)
    fileOut.createDimension("nx", nx)
    fileOut.createDimension("ny", ny)

    var = fileOut.createVariable("Time", "c", dimensions=["Time", "strLen"])

    year = 1
    month = 1
    hour = 0
    minute = 0
    second = 0

    for day in range(0, 10):
        timeStr = "%4.4i-%2.2i-%2.2i_%2.2i:%2.2i:%2.2i" % (
            year, month, day + 1, hour, minute, second)
        var[day, 0:19] = netCDF4.stringtochar(np.array(timeStr, 'S19'))

    varx = fileOut.createVariable("xAtmWind",
                                  "d",
                                  dimensions=["Time", "nx", "ny"])
    vary = fileOut.createVariable("yAtmWind",
                                  "d",
                                  dimensions=["Time", "nx", "ny"])
    for day in range(0, 10):
        varx[day, :, :] = ugx[:, :] * (float(2 - day) / 2.0)
        vary[day, :, :] = ugy[:, :] * (float(2 - day) / 2.0)

    fileOut.close()
Example #39
def write_collection(coll, nc_names, coll_properties, cra, nc):
    nc_names.instance_dim = INSTANCE_DIM
    coll.to_netcdf(nc, nc_names=nc_names, use_vlen=(not cra))
    string_id = nc_names.x_var[:(nc_names.x_var.find('_') + 1)]
    for pk, pv in coll_properties.items():
        vname = '{}{}'.format(string_id, pk)
        try:
            dtype = pv.dtype
        except AttributeError:
            dtype = type(pv[0])
        if dtype in (bytes, str):
            dtype = 'S1'
        if dtype == 'S1':
            pv = ['' if v is None else v for v in pv]
            max_char = len(max(pv, key=len))
            dim_name = vname + '_strlen'
            nc.createDimension(dim_name, max_char)
            var = nc.createVariable(
                vname, dtype, dimensions=(INSTANCE_DIM, dim_name))
            for i in range(len(var)):
                var[i] = stringtochar(np.array(pv[i], 'S' + str(max_char)))
        else:
            var = nc.createVariable(vname, dtype, dimensions=(INSTANCE_DIM,))
            var[:] = pv
Example #40
def control_to_netcdf(controlFile, outputFileName, **kwargs):

    """
    Converts the input control file into netCDF format.

    Args:
        controlFile: path of the input control file
        outputFileName: name of the netCDF file to create
    """

    start = time.time()

    index = 0
    controlParameterNames = []
    controlParameterDataTypes = []

    # Initialize new dataset
    ncfile = netCDF4.Dataset(outputFileName, mode='w')

    fileHandle = open(controlFile, 'r')
    for line in fileHandle:
        if "####" in line:
            nameOfControlParameters = next(fileHandle).strip()
            name = nameOfControlParameters
            controlParameterNames.append(nameOfControlParameters)
            numberOfParameterValues = int(next(fileHandle).strip())

            # Initialize dimension
            ncfile.createDimension(nameOfControlParameters, numberOfParameterValues)

            nameOfControlParameters = []
            valueType = int(next(fileHandle).strip())
            dataTypeOfParameterValues = _get_datatype(valueType)
            controlParameterDataTypes.append(dataTypeOfParameterValues)

            for i in range(numberOfParameterValues):
                _store_parameter_values_in_a_list(fileHandle, valueType, nameOfControlParameters)

            if valueType == 4:
                lengthOfLongestWord = max(len(word) for word in nameOfControlParameters)
                ncfile.createDimension('length_of_longest_string_in_' + name, lengthOfLongestWord)

                # Define variable
                var = ncfile.createVariable(controlParameterNames[index], controlParameterDataTypes[index],
                                            (controlParameterNames[index], 'length_of_longest_string_in_' + name))

                # Write data for parameters with string values; use a fixed-width
                # dtype so the char dimension matches the one declared above
                var[:] = netCDF4.stringtochar(numpy.array(nameOfControlParameters,
                                                          dtype='S{}'.format(lengthOfLongestWord)))
            else:
                var = ncfile.createVariable(controlParameterNames[index], controlParameterDataTypes[index],
                                            (controlParameterNames[index],))
                var[:] = nameOfControlParameters

            index += 1

    # Global attributes
    fileHandle = open(controlFile, 'r')
    ncfile.title = next(fileHandle).strip()

    # Close the 'ncfile' object
    ncfile.close()
Example #41
def write_dsg(file, ofile, my_series):
    nc = netCDF4.Dataset(file)
    vars=nc.variables.keys()
    coord_vars = ['time','time2','depth','lat','lon']
    # find data variables by removing coordinate variables from the variable list
    data_vars = [var for var in vars if var not in coord_vars]
    nt = len(nc.dimensions['time'])
    nz = len(nc.dimensions['depth'])

    nco = netCDF4.Dataset(ofile,'w')

    # create dimensions
    nco.createDimension('time',nt)
    if nz > 1:
        nco.createDimension('depth',nz)

    nchar=20
    nco.createDimension('nchar',nchar)
    # create coordinate variables
    time_v = nco.createVariable('time', 'f8', ('time'))
    lon_v = nco.createVariable('lon','f4')
    lat_v = nco.createVariable('lat','f4')
    if nz > 1:
        depth_v = nco.createVariable('depth','f4',dimensions='depth')
    else:
        depth_v = nco.createVariable('depth','f4')

    station_v = nco.createVariable('site','S1',('nchar'))
# write global attributes
    g_attdict = nc.__dict__
    g_attdict['Conventions'] = 'CF-1.6'
    if nz>1:
        g_attdict['featureType'] = 'timeSeriesProfile'
    else:
        g_attdict['featureType'] = 'timeSeries'

    g_attdict['naming_authority'] = 'gov.usgs'
    g_attdict['id'] = id
    g_attdict['source'] = 'USGS'
    g_attdict['institution'] = 'Woods Hole Coastal and Marine Science Center'
    g_attdict['project'] = my_series.project_name.strip()
    g_attdict['title'] = '%s/%s/%s' % (g_attdict['source'],g_attdict['id'],g_attdict['project'])
    g_attdict['keywords']='Oceans > Ocean Pressure > Water Pressure, Oceans > Ocean Temperature > Water Temperature, Oceans > Salinity/Density > Conductivity, Oceans > Salinity/Density > Salinity'
    g_attdict['keywords_vocabulary'] = 'GCMD Science Keywords'
    g_attdict['standard_name_vocabulary'] = 'CF-1.6'
    g_attdict['creator_email'] = '*****@*****.**'
    g_attdict['creator_name'] =  my_series.project_pi.strip()
    g_attdict['publisher_name'] = 'Ellyn Montgomery'
    g_attdict['summary']  = my_series.project_summary.strip()


    nco.setncatts(g_attdict) 
    # write station variable
    station_v.cf_role = 'timeseries_id'
    station_v.standard_name = 'station_id'
    data = numpy.empty((1,),'S'+repr(nchar))
    data[0] = id
    station_v[:] = netCDF4.stringtochar(data)

    # write time variable
    time_v.units = 'seconds since 1858-11-17 00:00:00 +0:00'
    time_v.standard_name = 'time'
    time_v.calendar = 'gregorian'
    time_v[:] = (nc.variables['time'][:]-2400001.)*3600.*24. + nc.variables['time2'][:]/1000.

    # write lon variable
    lon_v.units = 'degree_east'
    lon_v.standard_name = 'longitude'
    lon_v[:] = nc.variables['lon'][:]

    # write lat variable
    lat_v.units = 'degree_north'
    lat_v.standard_name = 'latitude'
    lat_v[:] = nc.variables['lat'][:]

    # write depth variable
    depth_v.units = 'm'
    depth_v.standard_name = 'depth'   
    depth_v.positive = 'down'
    depth_v.axis = 'Z'
    depth_v[:] = nc.variables['depth'][:]


    # create the data variables
    var_v=[]
    for varname in data_vars:
        ncvar = nc.variables[varname]
        # if time series variable
        if size(ncvar) == nt:
            var = nco.createVariable(varname,ncvar.dtype,('time'))
        elif size(ncvar) == nz:
            var = nco.createVariable(varname,ncvar.dtype,('depth'))
        else:
            var = nco.createVariable(varname,ncvar.dtype,('time','depth'))

    #   load old variable attributes and modify if necessary 
        attdict = ncvar.__dict__
    #   if dounpackshort and 'scale_factor' in attdict: del attdict['scale_factor']

        attdict['coordinates'] = 'time lon lat depth'

        # assign standard_name if in dictionary
        a = [k for (k, v) in d.items() if attdict['long_name'].strip().lower() in v]
        if len(a)==1: attdict['standard_name']=a[0]

        # write variable attributes
        var.setncatts(attdict) 
        # write the data
    #    print ncvar
        var[:] = ncvar[:]
        
    nco.close()
Example #42
def generateString(length, alphabet=string.ascii_letters + string.digits + string.punctuation):
    return(''.join([random.choice(alphabet) for i in range(length)]))

# test conversion of arrays of fixed-length strings
# to arrays of characters (with an extra dimension), and vice-versa.

FILE_NAME = 'tst_stringarr.nc'
FILE_FORMAT = 'NETCDF4_CLASSIC'
n2 = 20; nchar = 12; nrecs = 4
data = numpy.empty((nrecs,n2),'S'+repr(nchar))
for nrec in range(nrecs):
    for n in range(n2):
        data[nrec,n] = generateString(nchar)
datau = data.astype('U')
datac = stringtochar(data, encoding='ascii')

class StringArrayTestCase(unittest.TestCase):

    def setUp(self):
        self.file = FILE_NAME
        nc = Dataset(FILE_NAME,'w',format=FILE_FORMAT)
        nc.createDimension('n1',None)
        nc.createDimension('n2',n2)
        nc.createDimension('nchar',nchar)
        v = nc.createVariable('strings','S1',('n1','n2','nchar'))
        v2 = nc.createVariable('strings2','S1',('n1','n2','nchar'))
        # if _Encoding set, string array should automatically be converted
        # to a char array and vice-versa
        v2._Encoding = 'ascii'
        v3 = nc.createVariable('strings3','S1',('n1','n2','nchar'))
Example #43
f.close()
f = Dataset('clouds.nc')
cloud_var = f.variables['primary_cloud']
print(cloud_var)
print(cloud_var.datatype.enum_dict)
print(cloud_var[:])
f.close()

# dealing with strings
from netCDF4 import stringtochar
nc = Dataset('stringtest.nc','w',format='NETCDF4_CLASSIC')
nc.createDimension('nchars',3)
nc.createDimension('nstrings',None)
v = nc.createVariable('strings','S1',('nstrings','nchars'))
datain = numpy.array(['foo','bar'],dtype='S3')
v[:] = stringtochar(datain) # manual conversion to char array
print(v[:]) # data returned as char array
v._Encoding = 'ascii' # this enables automatic conversion
v[:] = datain # conversion to char array done internally
print(v[:]) # data returned in numpy string array
nc.close()
# strings in compound types
nc = Dataset('compoundstring_example.nc','w')
dtype = numpy.dtype([('observation', 'f4'),
                     ('station_name','S12')])
station_data_t = nc.createCompoundType(dtype,'station_data')
nc.createDimension('station',None)
statdat = nc.createVariable('station_obs', station_data_t, ('station',))
data = numpy.empty(2,station_data_t.dtype_view)
data['observation'][:] = (123.,3.14)
data['station_name'][:] = ('Boulder','New York')
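
The excerpt stops just short of writing the compound records; in the upstream netCDF4 documentation the example continues roughly as follows:

statdat[:] = data  # write both records; station_name is stored as fixed-width bytes
print(statdat[:])
nc.close()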
Example #44
def write_param_file(file_name,
                     nc_format='NETCDF3_CLASSIC',
                     glob_atts=NcGlobals(),
                     full_time_length=None,
                     subset_length=None,
                     unit_hydrograph_dt=None,
                     outlet_lon=None,
                     outlet_lat=None,
                     outlet_x_ind=None,
                     outlet_y_ind=None,
                     outlet_decomp_ind=None,
                     outlet_number=None,
                     outlet_mask=None,
                     outlet_name=None,
                     outlet_upstream_gridcells=None,
                     outlet_upstream_area=None,
                     source_lon=None,
                     source_lat=None,
                     source_x_ind=None,
                     source_y_ind=None,
                     source_decomp_ind=None,
                     source_time_offset=None,
                     source2outlet_ind=None,
                     source_tracer=None,
                     unit_hydrograph=None):

    """ Write a standard RVIC Parameter file """

    # ---------------------------------------------------------------- #
    # Open file
    f = Dataset(file_name, 'w', format=nc_format)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Time Variables

    # Timesteps
    timesteps = f.createDimension('timesteps', subset_length)
    timesteps = f.createVariable('timesteps', NC_DOUBLE, ('timesteps',))
    timesteps[:] = np.arange(subset_length)
    for key, val in share.timesteps.__dict__.items():
        if val:
            setattr(timesteps, key, val)
    timesteps.timestep_length = 'unit_hydrograph_dt'

    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # write global attributes
    glob_atts.update()
    for key, val in glob_atts.atts.items():
        if val:
            setattr(f, key, val)
    f.featureType = "timeSeries"
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 0-D variables

    # Full time length (size of ring)
    ftl = f.createVariable('full_time_length', NC_INT, ())
    ftl[:] = full_time_length
    for key, val in share.full_time_length.__dict__.items():
        if val:
            setattr(ftl, key, val)

    # Subset Length
    sl = f.createVariable('subset_length', NC_INT, ())
    sl[:] = subset_length
    for key, val in share.subset_length.__dict__.items():
        if val:
            setattr(sl, key, val)

    # UH timestep
    uh_dt = f.createVariable('unit_hydrograph_dt', NC_DOUBLE, ())
    uh_dt[:] = unit_hydrograph_dt
    for key, val in share.unit_hydrograph_dt.__dict__.items():
        if val:
            setattr(uh_dt, key, val)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Outlet Dimensions
    if outlet_y_ind.ndim == 0:
        numoutlets = 1
        outlet_name = np.array([outlet_name])
    else:
        numoutlets = len(outlet_lon)
    ocoords = ('outlets',)
    outlets = f.createDimension(ocoords[0], numoutlets)

    nocoords = ocoords + ('nc_chars',)
    char_names = stringtochar(outlet_name)
    chars = f.createDimension(nocoords[1], char_names.shape[1])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 1-D Outlet Variables

    # Outlet Cell Longitudes
    olon = f.createVariable('outlet_lon', NC_DOUBLE, ocoords)
    olon[:] = outlet_lon
    for key, val in share.outlet_lon.__dict__.items():
        if val:
            setattr(olon, key, val)

    # Outlet Cell Latitudes
    olat = f.createVariable('outlet_lat', NC_DOUBLE, ocoords)
    olat[:] = outlet_lat
    for key, val in share.outlet_lat.__dict__.items():
        if val:
            setattr(olat, key, val)

    # Outlet Cell X Indicies
    oxi = f.createVariable('outlet_x_ind', NC_INT, ocoords)
    oxi[:] = outlet_x_ind
    for key, val in share.outlet_x_ind.__dict__.items():
        if val:
            setattr(oxi, key, val)

    # Outlet Cell Y Indicies
    oyi = f.createVariable('outlet_y_ind', NC_INT, ocoords)
    oyi[:] = outlet_y_ind
    for key, val in share.outlet_y_ind.__dict__.items():
        if val:
            setattr(oyi, key, val)

    # Outlet Cell Decomp IDs
    odi = f.createVariable('outlet_decomp_ind', NC_INT, ocoords)
    odi[:] = outlet_decomp_ind
    for key, val in share.outlet_decomp_ind.__dict__.items():
        if val:
            setattr(odi, key, val)

    # Outlet Cell Number
    on = f.createVariable('outlet_number', NC_INT, ocoords)
    on[:] = outlet_number
    for key, val in share.outlet_number.__dict__.items():
        if val:
            setattr(on, key, val)

    # Outlet Mask
    om = f.createVariable('outlet_mask', NC_INT, ocoords)
    om[:] = outlet_mask
    for key, val in share.outlet_mask.__dict__.items():
        if val:
            setattr(om, key, val)

    # Outlet Upstream area
    oua = f.createVariable('outlet_upstream_area', NC_DOUBLE, ocoords)
    oua[:] = outlet_upstream_area
    for key, val in share.outlet_upstream_area.__dict__.items():
        if val:
            setattr(oua, key, val)

    # Outlet Upstream grid cells
    oug = f.createVariable('outlet_upstream_gridcells', NC_INT, ocoords)
    oug[:] = outlet_upstream_gridcells
    for key, val in share.outlet_upstream_gridcells.__dict__.items():
        if val:
            setattr(oug, key, val)

    # Outlet Names
    onm = f.createVariable('outlet_name', NC_CHAR, nocoords)
    onm[:, :] = char_names
    for key, val in share.outlet_name.__dict__.items():
        if val:
            setattr(onm, key, val)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Source Dimension
    scoords = ('sources',)
    sources = f.createDimension(scoords[0], len(source_lon))
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 1D Source Variables

    # Source Cell Longitudes
    slon = f.createVariable('source_lon', NC_DOUBLE, scoords)
    slon[:] = source_lon
    for key, val in share.source_lon.__dict__.items():
        if val:
            setattr(slon, key, val)

    # Source Cell Latitudes
    slat = f.createVariable('source_lat', NC_DOUBLE, scoords)
    slat[:] = source_lat
    for key, val in share.source_lat.__dict__.items():
        if val:
            setattr(slat, key, val)

    # Source Cell X Indicies
    sxi = f.createVariable('source_x_ind', NC_INT, scoords)
    sxi[:] = source_x_ind
    for key, val in share.source_x_ind.__dict__.items():
        if val:
            setattr(sxi, key, val)

    # Source Cell Y Indicies
    syi = f.createVariable('source_y_ind', NC_INT, scoords)
    syi[:] = source_y_ind
    for key, val in share.source_y_ind.__dict__.items():
        if val:
            setattr(syi, key, val)

    # Source Cell Decomp IDs
    sdi = f.createVariable('source_decomp_ind', NC_INT, scoords)
    sdi[:] = source_decomp_ind
    for key, val in share.source_decomp_ind.__dict__.iteritems():
        if val:
            setattr(sdi, key, val)

    # Source Cell Time Offset
    sto = f.createVariable('source_time_offset', NC_INT, scoords)
    sto[:] = source_time_offset
    for key, val in share.source_time_offset.__dict__.iteritems():
        if val:
            setattr(sto, key, val)

    # Source to Outlet Index Mapping
    s2o = f.createVariable('source2outlet_ind', NC_INT, scoords)
    s2o[:] = source2outlet_ind
    for key, val in share.source2outlet_ind.__dict__.iteritems():
        if val:
            setattr(s2o, key, val)

    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 3-D Source Variables
    uhcords = ('timesteps',) + scoords + ('tracers',)
    tracers = f.createDimension(uhcords[2], 1)

    # Unit Hydrographs
    uhs = f.createVariable('unit_hydrograph', NC_DOUBLE, uhcords)
    uhs[:, :] = unit_hydrograph
    for key, val in share.unit_hydrograph.__dict__.iteritems():
        if val:
            setattr(uhs, key, val)
    # ---------------------------------------------------------------- #

    f.close()

    log.info('Finished writing %s', file_name)
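
# The per-variable attribute-copy loops above repeat the same three lines
# for every variable. A minimal helper sketch that factors the pattern out
# (the name copy_share_atts is hypothetical; it assumes, as the snippets
# above do, that each `share` object exposes its attributes via __dict__):
def copy_share_atts(ncvar, share_obj):
    """Copy all truthy attributes from a share object onto a netCDF variable."""
    for key, val in share_obj.__dict__.items():
        if val:
            setattr(ncvar, key, val)

# usage sketch: copy_share_atts(olon, share.outlet_lon)
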
def create_pigment_tss_nc(metadata, data, output_folder):
    """ create a netcdf file for pigment or TSS data """
    netcdf_filepath   = os.path.join(output_folder, "%s.nc" % create_filename_output(metadata, data))
    output_netcdf_obj = Dataset(netcdf_filepath, "w", format="NETCDF4")

    # read gatts from input, add them to output. Some gatts will be overwritten
    input_gatts = metadata['gatts']
    check_vessel_name(input_gatts['vessel_name'])  # this raises a warning only
    if input_gatts['vessel_name'].strip() == '':
        input_gatts['vessel_name'] = 'UNKNOWN VESSEL'

    gatt_to_dispose = ['geospatial_lat_min', 'geospatial_lat_max', 'geospatial_lon_min',
                       'geospatial_lon_max', 'geospatial_vertical_min', 'geospatial_vertical_max',
                       'conventions', 'local_time_zone']

    for gatt in input_gatts.keys():
        if gatt not in gatt_to_dispose:
            if input_gatts[gatt] != '':
                setattr(output_netcdf_obj, gatt, input_gatts[gatt])
    setattr(output_netcdf_obj, 'input_xls_filename', os.path.basename(metadata['filename_input']))

    if 'local_time_zone' in input_gatts.keys():
        if input_gatts['local_time_zone'] != '':
            setattr(output_netcdf_obj, 'local_time_zone', np.float(input_gatts['local_time_zone']))

    output_netcdf_obj.date_created            = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
    output_netcdf_obj.geospatial_vertical_min = data.Depth.min()
    output_netcdf_obj.geospatial_vertical_max = data.Depth.max()

    output_netcdf_obj.createDimension("obs", data.shape[0])
    output_netcdf_obj.createDimension("station", len(data.Station_Code.unique()))
    output_netcdf_obj.createDimension('name_strlen', 50)

    # A profile is defined by a time/station combination; two profiles at
    # the same time but different locations can exist. To find the unique
    # profiles, the unique values of a 'time_station' string array are counted.
    time_station_arr = ['%s_%s' % (a, b) for a, b in zip(data.index, data.Station_Code.values)]
    len_prof         = len(np.unique(time_station_arr))
    output_netcdf_obj.createDimension("profile", len_prof)

    var_time         = output_netcdf_obj.createVariable("TIME", "d", "profile", fill_value=get_imos_parameter_info('TIME', '_FillValue'))
    var_lat          = output_netcdf_obj.createVariable("LATITUDE", "f4", "station", fill_value=get_imos_parameter_info('LATITUDE', '_FillValue'))
    var_lon          = output_netcdf_obj.createVariable("LONGITUDE", "f4", "station", fill_value=get_imos_parameter_info('LONGITUDE', '_FillValue'))
    var_station_name = output_netcdf_obj.createVariable("station_name", "S1", (u'station', u'name_strlen'))
    var_station_idx  = output_netcdf_obj.createVariable("station_index", "i4", "profile")
    var_profile      = output_netcdf_obj.createVariable("profile", "i4", "profile")
    var_rowsize      = output_netcdf_obj.createVariable("row_size", "i4", "profile")
    var_depth        = output_netcdf_obj.createVariable("DEPTH", "f4", "obs", fill_value=get_imos_parameter_info('DEPTH', '_FillValue'))

    var = 'DEPTH'
    if metadata['varatts']['Depth']['Comments'] != '' and metadata['varatts']['Depth']['Comments'] != 'positive down':
        setattr(output_netcdf_obj[var], 'comments', metadata['varatts']['Depth']['Comments'].replace('positive down', ''))

    # creation of rest of variables
    var_to_dispose = ['Latitude', 'Longitude', 'Depth', 'Time', 'Station_Code']
    for var in data.columns:
        if var not in var_to_dispose:
            if metadata['varatts'][var]['Fill value'] == '':
                fillvalue = -999
            else:
                fillvalue = metadata['varatts'][var]['Fill value']

            output_netcdf_obj.createVariable(var, "d", "obs", fill_value=fillvalue)
            if metadata['varatts'][var]['IMOS long_name'] != '':
                setattr(output_netcdf_obj[var], 'long_name', metadata['varatts'][var]['IMOS long_name'])
            if metadata['varatts'][var]['Units'] != '':
                setattr(output_netcdf_obj[var], 'units', metadata['varatts'][var]['Units'])
            if metadata['varatts'][var]['Comments'] != '':
                setattr(output_netcdf_obj[var], 'comments', metadata['varatts'][var]['Comments'])

            # SPM is wrongly set as a standard_name in the original xls files
            if 'SPM' not in var:
                if metadata['varatts'][var]['CF standard_name'] != '':
                    setattr(output_netcdf_obj[var], 'standard_name', metadata['varatts'][var]['CF standard_name'])

            if 'Sample_Number' in var:
                setattr(output_netcdf_obj[var], 'units', '1')  # dimensionless; CF uses the string '1'

            if np.dtype(data[var]) == 'O':
                os.remove(netcdf_filepath)
                _error('Incorrect values for variable \"%s\"' % var)
            output_netcdf_obj[var][:] = np.array(data[var].values).astype(np.double)

    # Contiguous ragged array representation of stations, netcdf 1.5
    # add gatts and variable attributes as stored in config files
    conf_file_generic = os.path.join(os.path.dirname(__file__), 'generate_nc_file_att')
    generate_netcdf_att(output_netcdf_obj, conf_file_generic, conf_file_point_of_truth=True)

    # lat lon depth
    _, idx_station_uniq = np.unique(data.Station_Code, return_index=True)
    idx_station_uniq.sort()
    var_lat[:]          = data.Latitude.values[idx_station_uniq].astype(np.float)
    var_lon[:]          = data.Longitude.values[idx_station_uniq].astype(np.float)
    if np.dtype(data.Depth) == 'O':
        try:
            var_depth[:] = data.Depth.values.astype(np.float)
        except ValueError:
            os.remove(netcdf_filepath)
            _error('Incorrect depth value')
    else:
        var_depth[:]       = data.Depth.values.astype(np.float)
    var_depth.positive = 'down'

    # time
    _, idx_time_station_uniq = np.unique(time_station_arr, return_index=True)
    idx_time_station_uniq.sort()
    time_values      = (data.index[idx_time_station_uniq]).to_pydatetime()
    time_val_dateobj = date2num(time_values, output_netcdf_obj['TIME'].units, output_netcdf_obj['TIME'].calendar)
    var_time[:]      = time_val_dateobj.astype(np.double)

    # station
    var_station_name[:] = stringtochar(np.array(data.Station_Code.values[idx_station_uniq], 'S50'))

    # compute number of observations per profile
    if len_prof == 1:
        var_rowsize[:] = data.shape[0]
    else:
        n_obs_per_prof = []
        for i in range(len_prof - 1):
            n_obs_per_prof.append(idx_time_station_uniq[i + 1] - idx_time_station_uniq[i])
        n_obs_per_prof.append(len(data.index.values) - idx_time_station_uniq[-1])

        var_rowsize[:] = n_obs_per_prof

    # compute association between profile number and station name
    # which station this profile is for
    aa = np.array(data.Station_Code)[idx_station_uniq].tolist()
    bb = np.array(data.Station_Code)[idx_time_station_uniq].tolist()
    var_station_idx[:] = [aa.index(b) + 1 for b in bb]

    # profile
    var_profile[:] = range(1, len_prof + 1)

    output_netcdf_obj.geospatial_vertical_min = output_netcdf_obj['DEPTH'][:].min()
    output_netcdf_obj.geospatial_vertical_max = output_netcdf_obj['DEPTH'][:].max()
    output_netcdf_obj.geospatial_lat_min      = output_netcdf_obj['LATITUDE'][:].min()
    output_netcdf_obj.geospatial_lat_max      = output_netcdf_obj['LATITUDE'][:].max()
    output_netcdf_obj.geospatial_lon_min      = output_netcdf_obj['LONGITUDE'][:].min()
    output_netcdf_obj.geospatial_lon_max      = output_netcdf_obj['LONGITUDE'][:].max()
    output_netcdf_obj.time_coverage_start     = min(time_values).strftime('%Y-%m-%dT%H:%M:%SZ')
    output_netcdf_obj.time_coverage_end       = max(time_values).strftime('%Y-%m-%dT%H:%M:%SZ')

    output_netcdf_obj.close()
    return netcdf_filepath
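
# A hedged alternative to the row_size loop above: in a contiguous ragged
# array the per-profile observation counts are simply the differences
# between consecutive profile start indices, so the loop collapses to one
# np.diff call (assumes the start indices are sorted, as ensured above):
import numpy as np

def row_sizes(start_indices, n_obs):
    """Observations per profile from sorted profile start indices."""
    starts = np.asarray(start_indices)
    return np.diff(np.append(starts, n_obs))

# e.g. row_sizes([0, 3, 7], 10) -> array([3, 4, 3])
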
def create_absorption_nc(metadata, data, output_folder):
    """ create a netcdf file for absorption data """
    netcdf_filepath   = os.path.join(output_folder, "%s.nc" % create_filename_output(metadata, data))
    output_netcdf_obj = Dataset(netcdf_filepath, "w", format="NETCDF4")

    data_dict = data[1]
    data_df = data[0]

    # read gatts from input, add them to output. Some gatts will be overwritten
    input_gatts = metadata['gatts']
    check_vessel_name(input_gatts['vessel_name'])  # this raises a warning only
    if input_gatts['vessel_name'].strip() == '':
        input_gatts['vessel_name'] = 'UNKNOWN VESSEL'

    gatt_to_dispose = ['geospatial_lat_min', 'geospatial_lat_max', 'geospatial_lon_min',
                       'geospatial_lon_max', 'geospatial_vertical_min', 'geospatial_vertical_max',
                       'conventions', 'local_time_zone']

    for gatt in input_gatts.keys():
        if gatt not in gatt_to_dispose:
            if input_gatts[gatt] != '':
                setattr(output_netcdf_obj, gatt, input_gatts[gatt])
    setattr(output_netcdf_obj, 'input_xls_filename', os.path.basename(metadata['filename_input']))

    if 'local_time_zone' in input_gatts.keys():
        if input_gatts['local_time_zone'] != '':
            setattr(output_netcdf_obj, 'local_time_zone', np.float(input_gatts['local_time_zone']))

    output_netcdf_obj.date_created            = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
    output_netcdf_obj.geospatial_vertical_min = min(data_dict['Depth'])
    output_netcdf_obj.geospatial_vertical_max = max(data_dict['Depth'])

    output_netcdf_obj.createDimension("obs", data_df.shape[1])
    output_netcdf_obj.createDimension("station", len(np.unique(data_dict['Station_Code'])))
    output_netcdf_obj.createDimension('name_strlen', 50)
    output_netcdf_obj.createDimension('wavelength', data_df.shape[0])

    # A profile is defined by a time/station combination; two profiles at
    # the same time but different locations can exist. To find the unique
    # profiles, the unique values of a 'time_station' string array are counted.
    time_station_arr = ['%s_%s' % (a, b) for a, b in zip(data_dict['Dates'], data_dict['Station_Code'])]
    len_prof         = len(np.unique(time_station_arr))
    output_netcdf_obj.createDimension("profile", len_prof)

    var_time         = output_netcdf_obj.createVariable("TIME", "d", "profile", fill_value=get_imos_parameter_info('TIME', '_FillValue'))
    var_lat          = output_netcdf_obj.createVariable("LATITUDE", "f", "station", fill_value=get_imos_parameter_info('LATITUDE', '_FillValue'))
    var_lon          = output_netcdf_obj.createVariable("LONGITUDE", "f", "station", fill_value=get_imos_parameter_info('LONGITUDE', '_FillValue'))
    var_station_name = output_netcdf_obj.createVariable("station_name", "S1", (u'station', u'name_strlen'))
    var_station_idx  = output_netcdf_obj.createVariable("station_index", "i4", "profile")
    var_profile      = output_netcdf_obj.createVariable("profile", "i4", "profile")
    var_rowsize      = output_netcdf_obj.createVariable("row_size", "i4", "profile")
    var_depth        = output_netcdf_obj.createVariable("DEPTH", "f", "obs", fill_value=get_imos_parameter_info('DEPTH', '_FillValue'))
    var_wavelength   = output_netcdf_obj.createVariable("wavelength", "f", "wavelength")

    var = data_dict['main_var_name'][0]
    output_netcdf_obj.createVariable(var, "d", ("obs", "wavelength"), fill_value=metadata['varatts_col'][var]['Fill value'])
    if metadata['varatts_col'][var]['IMOS long_name'] != '':
        setattr(output_netcdf_obj[var], 'long_name', metadata['varatts_col'][var]['IMOS long_name'])
    if metadata['varatts_col'][var]['Units'] != '':
        setattr(output_netcdf_obj[var], 'units', metadata['varatts_col'][var]['Units'])
    if metadata['varatts_col'][var]['Comments'] != '':
        setattr(output_netcdf_obj[var], 'comments', metadata['varatts_col'][var]['Comments'])
    if metadata['varatts_col'][var]['CF standard_name'] != '':
        setattr(output_netcdf_obj[var], 'standard_name', metadata['varatts_col'][var]['CF standard_name'])

    data_val                  = data_df.transpose()
    output_netcdf_obj[var][:] = np.array(data_val.values)

    # Contiguous ragged array representation of stations, netcdf 1.5
    # add gatts and variable attributes as stored in config files
    conf_file_generic = os.path.join(os.path.dirname(__file__), 'generate_nc_file_att')
    generate_netcdf_att(output_netcdf_obj, conf_file_generic, conf_file_point_of_truth=True)

    # lat lon depth
    _, idx_station_uniq = np.unique(data_dict['Station_Code'], return_index=True)
    idx_station_uniq.sort()
    var_lat[:]          = np.array(data_dict['Latitude'])[idx_station_uniq]
    var_lon[:]          = np.array(data_dict['Longitude'])[idx_station_uniq]
    var_depth[:]        = data_dict['Depth']
    var_depth.positive  = 'down'

    # time
    _, idx_time_station_uniq = np.unique(time_station_arr, return_index=True)
    idx_time_station_uniq.sort()
    time_values      = (data_dict['Dates'][idx_time_station_uniq]).to_pydatetime()
    time_val_dateobj = date2num(time_values, output_netcdf_obj['TIME'].units, output_netcdf_obj['TIME'].calendar)
    var_time[:]      = time_val_dateobj

    # wavelength
    var = 'Wavelength'
    var_wavelength[:] = data_dict['Wavelength']
    if metadata['varatts_col'][var]['IMOS long_name'] != '':
        setattr(var_wavelength, 'long_name', metadata['varatts_col'][var]['IMOS long_name'])
    if metadata['varatts_col'][var]['Units'] != '':
        setattr(var_wavelength, 'units', metadata['varatts_col'][var]['Units'])
    if metadata['varatts_col'][var]['Comments'] != '':
        setattr(var_wavelength, 'comments', metadata['varatts_col'][var]['Comments'])
    if metadata['varatts_col'][var]['CF standard_name'] != '':
        setattr(var_wavelength, 'standard_name', metadata['varatts_col'][var]['CF standard_name'])

    # stations
    var_station_name[:] = stringtochar(np.array(data_dict['Station_Code'], 'S50')[np.sort(idx_station_uniq)])

    # compute number of observations per profile
    if len_prof == 1:
        var_rowsize[:] = data_df.shape[1]
    else:
        n_obs_per_prof = []
        for i in range(len_prof - 1):
            n_obs_per_prof.append(idx_time_station_uniq[i + 1] - idx_time_station_uniq[i])
        n_obs_per_prof.append(data_df.shape[1] - idx_time_station_uniq[-1])

        var_rowsize[:] = n_obs_per_prof

    # compute association between profile number and station name
    # which station this profile is for
    aa = np.array(data_dict['Station_Code'])[idx_station_uniq].tolist()
    bb = np.array(data_dict['Station_Code'])[idx_time_station_uniq].tolist()
    var_station_idx[:] = [aa.index(b) + 1 for b in bb]

    # profile
    var_profile[:] = range(1, len_prof + 1)

    output_netcdf_obj.geospatial_vertical_min = output_netcdf_obj['DEPTH'][:].min()
    output_netcdf_obj.geospatial_vertical_max = output_netcdf_obj['DEPTH'][:].max()
    output_netcdf_obj.geospatial_lat_min      = output_netcdf_obj['LATITUDE'][:].min()
    output_netcdf_obj.geospatial_lat_max      = output_netcdf_obj['LATITUDE'][:].max()
    output_netcdf_obj.geospatial_lon_min      = output_netcdf_obj['LONGITUDE'][:].min()
    output_netcdf_obj.geospatial_lon_max      = output_netcdf_obj['LONGITUDE'][:].max()
    output_netcdf_obj.time_coverage_start     = min(time_values).strftime('%Y-%m-%dT%H:%M:%SZ')
    output_netcdf_obj.time_coverage_end       = max(time_values).strftime('%Y-%m-%dT%H:%M:%SZ')

    output_netcdf_obj.close()
    return netcdf_filepath
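
# Both functions above encode Python datetimes with date2num, using the
# units and calendar attributes that generate_netcdf_att has already set on
# the TIME variable. A self-contained sketch of that step (the units string
# below is an assumed IMOS-style value, not read from any config):
from datetime import datetime
from netCDF4 import date2num

times = [datetime(2016, 1, 1, 0, 0), datetime(2016, 1, 1, 12, 0)]
units = 'days since 1950-01-01 00:00:00 UTC'
nums = date2num(times, units, calendar='gregorian')
# nums -> array([24106. , 24106.5])
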
Example #47
0
def grid2dsg(ifile,ofile,coord_vars=['time','time2','depth','lat','lon'],
             project_name=None,project_pi=None,project_summary=None):
    nc = netCDF4.Dataset(ifile)
    id = '%s/%s' % (project_name,ifile.split('.')[0])  
    #id = ifile.split('.')[0]
    vars=nc.variables.keys()
    data_vars = [var for var in vars if var not in coord_vars]
    nt = len(nc.dimensions['time'])
    nz = len(nc.dimensions['depth'])
    
    # create dimensions of output file
    nco = netCDF4.Dataset(ofile,'w',clobber=True)
    nco.createDimension('time',nt)
    if nz > 1:
        nco.createDimension('depth',nz)

    nchar=20
    nco.createDimension('nchar',nchar)
    # create coordinate variables
    time_v = nco.createVariable('time', 'f8', ('time'))
    lon_v = nco.createVariable('lon','f4')
    lat_v = nco.createVariable('lat','f4')
    if nz > 1:
        depth_v = nco.createVariable('depth','f4',dimensions='depth')
    else:
        depth_v = nco.createVariable('depth','f4')

    station_v = nco.createVariable('site','S1',('nchar'))
    # write global attributes
    g_attdict = nc.__dict__
    g_attdict['Conventions'] = 'CF-1.6'
    if nz>1:
        g_attdict['featureType'] = 'timeSeriesProfile'
    else:
        g_attdict['featureType'] = 'timeSeries'

    g_attdict['naming_authority'] = 'gov.usgs'
    g_attdict['id'] = id
    g_attdict['source'] = 'USGS'
    g_attdict['institution'] = 'USGS Woods Hole Coastal and Marine Science Center'
    g_attdict['project'] = 'Coastal and Marine Geology Program'
    g_attdict['title'] = '%s/%s/%s' % (g_attdict['source'],project_name,g_attdict['id'])
    g_attdict['keywords']='Oceans > Ocean Pressure > Water Pressure, Oceans > Ocean Temperature > Water Temperature, Oceans > Salinity/Density > Conductivity, Oceans > Salinity/Density > Salinity'
    g_attdict['keywords_vocabulary']='GCMD Science Keywords'
    g_attdict['standard_name_vocabulary'] = 'CF-1.6'
    g_attdict['creator_email'] = '*****@*****.**'
    g_attdict['creator_name'] =  'Rich Signell'
    g_attdict['creator_phone'] = '+1 (508) 548-8700'
    g_attdict['creator_url'] = 'http://www.usgs.gov'
    g_attdict['publisher_email'] = '*****@*****.**'
    g_attdict['publisher_name'] =  'Ellyn Montgomery'
    g_attdict['publisher_phone'] = '+1 (508) 548-8700'
    g_attdict['publisher_url'] = 'http://www.usgs.gov'
    g_attdict['contributor_name'] =  project_pi
    g_attdict['contributor_role'] = 'principalInvestigator' #from esip ACDD
    g_attdict['summary'] = project_summary

    nco.setncatts(g_attdict) 
    # write station variable
    station_v.cf_role = 'timeseries_id'
    station_v.standard_name = 'station_id'
    data = numpy.empty((1,),'S'+repr(nchar))
    data[0] = ifile.split('.')[0]
    station_v[:] = netCDF4.stringtochar(data)

    # write time variable
    time_v.units = 'milliseconds since 1858-11-17 00:00:00 +0:00'
    time_v.standard_name = 'time'
    time_v.calendar = 'gregorian'
    time_v[:] = (np.int64(nc.variables['time'][:])-2400001)*3600*24*1000 + nc.variables['time2'][:]

    # write lon variable
    lon_v.units = 'degree_east'
    lon_v.standard_name = 'longitude'
    lon_v[:] = nc.variables['lon'][:]

    # write lat variable
    lat_v.units = 'degree_north'
    lat_v.standard_name = 'latitude'
    lat_v[:] = nc.variables['lat'][:]

    # write depth variable
    depth_v.units = 'm'
    depth_v.standard_name = 'depth'   
    depth_v.positive = 'down'
    depth_v.axis = 'Z'
    depth_v[:] = nc.variables['depth'][:]


    # create the data variables
    var_v=[]
    for varname in data_vars:
        ncvar = nc.variables[varname]
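        # NOTE: `size` is not defined in this snippet; it is presumably
        # numpy's size() brought in by a star import (e.g. from pylab).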
        # if time series variable
        if size(ncvar) == nt:
            var = nco.createVariable(varname,ncvar.dtype,('time'))
        elif size(ncvar) == nz:
            var = nco.createVariable(varname,ncvar.dtype,('depth'))
        else:
            var = nco.createVariable(varname,ncvar.dtype,('time','depth'))

    #   load old variable attributes and modify if necessary 
        attdict = ncvar.__dict__
    #   if dounpackshort and 'scale_factor' in attdict: del attdict['scale_factor']
      
        attdict['coordinates'] = 'time lon lat depth'

        # assign standard_name if in dictionary
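        # NOTE: `d` is not defined in this snippet; it is assumed to be a
        # module-level dict mapping each CF standard_name to a list of
        # accepted long_name spellings, matched case-insensitively below.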
        a = [k for (k, v) in d.iteritems() if attdict['long_name'].strip().lower() in v]
        if len(a)==1: attdict['standard_name']=a[0]
        
        # write variable attributes
        var.setncatts(attdict) 
        # write the data
    #    print ncvar
        var[:] = ncvar[:]
    nco.close()
Example #48
0
def generate_qld_netcdf(resource_id, metadata, output_path):
    """
    generate a netcdf file (wave or current) for a resource_id
    :param resource_id:
    :param metadata:
    :param output_path:
    :return:
    """
    last_mod_date = get_last_modification_date_resource_id(metadata['package_name'], resource_id)
    if last_mod_date is None:
        # creating an epoch date
        last_mod_date = datetime.datetime(1970, 1, 1, 0, 0)

    wave_df = retrieve_json_data(resource_id)
    if wave_df is None:
        logger.error('No valid data to process for resource_id {resource_id}'.format(resource_id=resource_id))
        return

    if 'Current Speed' in wave_df.columns.values or 'Current Direction' in wave_df.columns.values:
        logger.info('Processing Current data')
        data_code = 'V'
    else:
        logger.info('Processing Wave data')
        data_code = 'W'

    var_mapping = param_mapping_parser(QLD_WAVE_PARAMETER_MAPPING)
    date_start_str = wave_df.index.strftime('%Y%m%dT%H%M%SZ').values.min()
    date_end_str = wave_df.index.strftime('%Y%m%dT%H%M%SZ').values.max()
    nc_file_name = 'DES-QLD_{data_code}_{date_start}_{deployment_code}_WAVERIDER_FV01_END-{date_end}.nc'.format(
        date_start=date_start_str,
        data_code=data_code,
        deployment_code=metadata['site_name'].replace(' ', '-'),
        date_end=date_end_str)

    nc_file_path = os.path.join(output_path, nc_file_name)
    logger.info('Creating NetCDF {netcdf} from resource_id {resource_id}'.format(
        netcdf=os.path.basename(nc_file_path),
        resource_id=resource_id))

    with Dataset(nc_file_path, 'w', format='NETCDF4') as nc_file_obj:
        nc_file_obj.createDimension("TIME", wave_df.index.shape[0])
        nc_file_obj.createDimension("station_id_strlen", 30)

        nc_file_obj.createVariable("LATITUDE", "d", fill_value=FILLVALUE)
        nc_file_obj.createVariable("LONGITUDE", "d", fill_value=FILLVALUE)
        nc_file_obj.createVariable("STATION_ID", "S1", ("TIME", "station_id_strlen"))
        nc_file_obj["LATITUDE"][:] = metadata['latitude']
        nc_file_obj["LONGITUDE"][:] = metadata['longitude']
        nc_file_obj["STATION_ID"][:] = [stringtochar(np.array(metadata['site_name'], 'S30'))] * \
                                       wave_df.shape[0]

        var_time = nc_file_obj.createVariable("TIME", "d", "TIME")
        # add gatts and variable attributes as stored in config files
        generate_netcdf_att(nc_file_obj, NC_ATT_CONFIG, conf_file_point_of_truth=True)
        time_val_dateobj = date2num(wave_df.index.to_pydatetime(), var_time.units, var_time.calendar)

        var_time[:] = time_val_dateobj

        df_varname_ls = list(wave_df[wave_df.keys()].columns.values)

        for df_varname in df_varname_ls:
            df_varname_mapped_equivalent = df_varname
            mapped_varname = var_mapping.loc[df_varname_mapped_equivalent]['VARNAME']

            dtype = wave_df[df_varname].values.dtype
            if dtype == np.dtype('int64'):
                dtype = np.dtype('int16')  # short
            else:
                dtype = np.dtype('f')

            nc_file_obj.createVariable(mapped_varname, dtype, "TIME", fill_value=FILLVALUE)
            set_var_attr(nc_file_obj, var_mapping, mapped_varname, df_varname_mapped_equivalent, dtype)
            setattr(nc_file_obj[mapped_varname], 'coordinates', "TIME LATITUDE LONGITUDE")

            try:
                nc_file_obj[mapped_varname][:] = wave_df[df_varname].values
            except ValueError:
                pass

        setattr(nc_file_obj, 'operator', metadata['owner'])
        setattr(nc_file_obj, 'title', 'Delayed mode wave data measured at {site}'.format(site=metadata['site_name']))
        setattr(nc_file_obj, 'site_code', metadata['site_code'])
        setattr(nc_file_obj, 'site_name', metadata['site_name'])
        if not np.isnan(metadata['wmo_id']):
            setattr(nc_file_obj, 'wmo_id', int(metadata['wmo_id']))

        setattr(nc_file_obj, 'geospatial_lat_min', metadata['latitude'])
        setattr(nc_file_obj, 'geospatial_lat_max', metadata['latitude'])
        setattr(nc_file_obj, 'geospatial_lon_min', metadata['longitude'])
        setattr(nc_file_obj, 'geospatial_lon_max', metadata['longitude'])
        setattr(nc_file_obj, 'time_coverage_start', wave_df.index.strftime('%Y-%m-%dT%H:%M:%SZ').values.min())
        setattr(nc_file_obj, 'time_coverage_end', wave_df.index.strftime('%Y-%m-%dT%H:%M:%SZ').values.max())
        setattr(nc_file_obj, 'date_created', pd.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"))

        data_url = '{base_url_data}{id}&limit={limit}'.format(base_url_data=BASE_URL_DATA,
                                                              id=resource_id,
                                                              limit=LIMIT_VALUES)
        setattr(nc_file_obj, 'data_original_url', data_url)
        setattr(nc_file_obj, 'glossary', 'https://www.qld.gov.au/environment/coasts-waterways/beach/waves-glossary')
        setattr(nc_file_obj, 'wave_monitoring_faq', 'https://www.qld.gov.au/environment/coasts-waterways/beach/waves')
        setattr(nc_file_obj, 'first_deployment_date', metadata.first_deployment_date.strftime("%Y-%m-%dT%H:%M:%SZ"))
        setattr(nc_file_obj, 'water_depth', metadata.water_depth)
        setattr(nc_file_obj, 'water_depth_units', 'meters')
        setattr(nc_file_obj, 'site_information_url', metadata.source_url)
        setattr(nc_file_obj, 'owner', metadata.owner)
        setattr(nc_file_obj, 'instrument_model', metadata.instrument_model)
        setattr(nc_file_obj, 'instrument_maker', metadata.instrument_maker)
        setattr(nc_file_obj, 'waverider_type', metadata.waverider_type)

        github_comment = 'Product created with %s' % get_git_revision_script_url(os.path.realpath(__file__))
        nc_file_obj.lineage = ('%s %s' % (getattr(nc_file_obj, 'lineage', ''), github_comment))

        # save to pickle file the new last downloaded date for future run
        pickle_file = os.path.join(WIP_DIR, 'last_downloaded_date_resource_id.pickle')
        last_downloaded_date_resources = load_pickle_db(pickle_file)
        if not last_downloaded_date_resources:
            last_downloaded_date_resources = dict()
        last_modification = last_mod_date

        last_downloaded_date_resources[resource_id] = last_modification
        with open(pickle_file, 'wb') as p_write:
            pickle.dump(last_downloaded_date_resources, p_write)

        return nc_file_path
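
# The int64 -> int16 downcast above keeps integer channels compact on disk;
# a sketch of the same dtype selection in isolation (the helper name is
# illustrative, not part of the original module):
import numpy as np

def nc_dtype(values):
    """Pick a netCDF dtype: int16 for 64-bit integer data, float32 otherwise."""
    if values.dtype == np.dtype('int64'):
        return np.dtype('int16')  # netCDF short
    return np.dtype('f')          # netCDF float

# nc_dtype(np.array([1, 2, 3])) -> dtype('int16') where the default int is 64-bit
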
Example #49
0
def encode_cf_variable(array):
    """Converts an XArray into an XArray suitable for saving as a netCDF
    variable
    """
    dimensions = array.dimensions
    data = array.data
    attributes = array.attributes.copy()
    encoding = array.encoding.copy()

    if (np.issubdtype(data.dtype, np.datetime64)
            or (data.dtype.kind == 'O'
                and isinstance(data.reshape(-1)[0], datetime))):
        # encode datetime arrays into numeric arrays
        (data, units, calendar) = utils.encode_cf_datetime(
            data, encoding.pop('units', None), encoding.pop('calendar', None))
        attributes['units'] = units
        attributes['calendar'] = calendar
    elif data.dtype == np.dtype('O'):
        # Occasionally, one will end up with variables with dtype=object
        # (likely because they were created from pandas objects which don't
        # maintain dtype carefully). This code makes a best-effort attempt to
        # encode them into a dtype that netCDF can handle by inspecting the
        # dtype of the first element.
        dtype = np.array(data.reshape(-1)[0]).dtype
        # N.B. the "astype" call below will fail if data cannot be cast to the
        # type of its first element (which is probably the only sensible thing
        # to do).
        data = np.asarray(data).astype(dtype)

    def get_to(source, dest, k):
        v = source.get(k)
        dest[k] = v
        return v

    # encode strings as character arrays
    if np.issubdtype(data.dtype, (str, unicode)):
        data = nc4.stringtochar(data)
        dimensions = dimensions + ('string%s' % data.shape[-1],)

    # unscale/mask
    if any(k in encoding for k in ['add_offset', 'scale_factor']):
        data = np.array(data, dtype=float, copy=True)
        if 'add_offset' in encoding:
            data -= get_to(encoding, attributes, 'add_offset')
        if 'scale_factor' in encoding:
            data /= get_to(encoding, attributes, 'scale_factor')

    # replace NaN with the fill value
    if '_FillValue' in encoding:
        if encoding['_FillValue'] is np.nan:
            attributes['_FillValue'] = np.nan
        else:
            nans = np.isnan(data)
            if nans.any():
                data[nans] = get_to(encoding, attributes, '_FillValue')

    # restore original dtype
    if 'dtype' in encoding:
        if np.issubdtype(encoding['dtype'], int):
            data = data.round()
        data = data.astype(encoding['dtype'])

    return xarray.XArray(dimensions, data, attributes, encoding=encoding)
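
# A minimal numeric check of the unscale/mask step above: packing applies
# (data - add_offset) / scale_factor, and a CF-aware reader reverses it
# with data * scale_factor + add_offset (the values are illustrative):
import numpy as np

data = np.array([20.0, 21.5, 23.0])          # physical values
add_offset, scale_factor = 20.0, 0.5
packed = (data - add_offset) / scale_factor  # what encode_cf_variable stores
restored = packed * scale_factor + add_offset
assert np.allclose(restored, data)           # the packing round-trips
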
Example #50
0
    def __write_array(self):
        """ Write history file """

        # ------------------------------------------------------------ #
        # Open file
        f = Dataset(self.filename, 'w', self._file_format)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Time Variable
        time = f.createDimension('time', None)

        time = f.createVariable('time', self._ncprec, ('time',),
                                **self.ncvaropts)
        time[:] = self._out_times[:self._out_data_i+1]
        for key, val in share.time.__dict__.iteritems():
            if val:
                setattr(time, key, val)
        time.calendar = self._calendar

        if self._avgflag != 'I':
            nv = f.createDimension('nv', 2)

            time.bounds = 'time_bnds'

            time_bnds = f.createVariable('time_bnds', self._ncprec,
                                         ('time', 'nv',), **self.ncvaropts)
            time_bnds[:, :] = self._out_time_bnds[:self._out_data_i+1]
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Setup Coordinate Variables
        coords = ('outlets',)

        outlets = f.createDimension('outlets', self._num_outlets)

        nocoords = coords + ('nc_chars',)
        char_names = stringtochar(self._outlet_name)
        chars = f.createDimension(nocoords[1], char_names.shape[1])
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Variables
        outlet_lon = f.createVariable('lon', self._ncprec, coords,
                                      **self.ncvaropts)
        outlet_lat = f.createVariable('lat', self._ncprec, coords,
                                      **self.ncvaropts)
        outlet_x_ind = f.createVariable('outlet_x_ind', NC_INT, coords,
                                        **self.ncvaropts)
        outlet_y_ind = f.createVariable('outlet_y_ind', NC_INT, coords,
                                        **self.ncvaropts)
        outlet_decomp_ind = f.createVariable('outlet_decomp_ind', NC_INT,
                                             coords, **self.ncvaropts)
        onm = f.createVariable('outlet_name', NC_CHAR, nocoords,
                               **self.ncvaropts)

        outlet_lon[:] = self._outlet_lon
        outlet_lat[:] = self._outlet_lat
        outlet_x_ind[:] = self._outlet_x_ind
        outlet_y_ind[:] = self._outlet_y_ind
        outlet_decomp_ind[:] = self._outlet_decomp_ind
        onm[:, :] = char_names

        for key, val in share.outlet_lon.__dict__.iteritems():
            if val:
                setattr(outlet_lon, key, val)

        for key, val in share.outlet_lat.__dict__.iteritems():
            if val:
                setattr(outlet_lat, key, val)

        for key, val in share.outlet_y_ind.__dict__.iteritems():
            if val:
                setattr(outlet_y_ind, key, val)

        for key, val in share.outlet_x_ind.__dict__.iteritems():
            if val:
                setattr(outlet_x_ind, key, val)

        for key, val in share.outlet_decomp_ind.__dict__.iteritems():
            if val:
                setattr(outlet_decomp_ind, key, val)

        for key, val in share.outlet_name.__dict__.iteritems():
            if val:
                setattr(onm, key, val)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Write Fields
        tcoords = ('time',) + coords

        for field in self._fincl:
            var = f.createVariable(field, self._ncprec, tcoords,
                                   **self.ncvaropts)
            var[:, :] = self._out_data[field][:self._out_data_i+1]

            for key, val in getattr(share, field).__dict__.iteritems():
                if val:
                    setattr(var, key, val)
            var.units = self._units
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # write global attributes
        self._glob_ats.update()
        for key, val in self._glob_ats.atts.iteritems():
            if val:
                setattr(f, key, val)
        f.featureType = "timeSeries"
        # ------------------------------------------------------------ #
        f.close()
        log.info('Finished writing %s', self.filename)
Example #51
0
def add_var(dst, name, dims, data=None, shape=None, atts=None, dtype=None, zlib=True, fillValue=None, 
            lusestr=True, **kwargs):
  ''' Function to add a Variable to a NetCDF Dataset; returns the Variable reference. '''
  # all remaining kwargs are passed on to dst.createVariable()
  # use data array to infer dimensions and data type
  if data is not None:
    if not isinstance(data,np.ndarray): raise TypeError     
    if len(dims) != data.ndim: raise NCDataError, "Number of dimensions in '%s' does not match data array."%(name,)    
    if shape: 
      if shape != data.shape: raise NCDataError, "Shape of '%s' does not match data array."%(name,)
    else: shape = data.shape
    # get dtype 
    if dtype: 
      if dtype != data.dtype: data = data.astype(dtype)
        # raise NCDataError, "Data type in '%s' does not match data array."%(name,) 
    else: dtype = data.dtype
  if dtype is None: raise NCDataError, "Cannot construct a NetCDF Variable without a data array or an abstract data type."
  dtype = np.dtype(dtype) # use numpy types
  if dtype is np.dtype('bool_'): dtype = np.dtype('i1') # cast numpy bools as 8-bit integers
  lstrvar = ( dtype.kind == 'S' and not lusestr )
  # check/create dimensions
  if shape is None: shape = [None,]*len(dims)
  else: shape = list(shape)
  if len(shape) != len(dims): raise NCAxisError 
  for i,dim in zip(xrange(len(dims)),dims):
    if dim in dst.dimensions:
      if shape[i] is None: 
        shape[i] = len(dst.dimensions[dim])
      else: 
        if shape[i] != len(dst.dimensions[dim]): 
          raise NCAxisError, 'Size of dimension %s does not match records! %i != %i'%(dim,shape[i],len(dst.dimensions[dim]))
    else: 
      if shape[i] is not None: dst.createDimension(dim, size=shape[i])
      else: raise NCAxisError, "Cannot construct dimension '%s' without size information."%(dims,)
  dims = tuple(dims); shape = tuple(shape)
  # figure out parameters for variable
  varargs = dict() # arguments to be passed to createVariable
  if isinstance(zlib,dict): varargs.update(zlib)
  elif zlib: varargs.update(zlib_default)
  varargs.update(kwargs)
  if fillValue is None:
    if atts and '_FillValue' in atts: fillValue = atts['_FillValue'] # will be removed later
    elif atts and 'missing_value' in atts: fillValue = atts['missing_value']
    elif data is not None and isinstance(data,ma.MaskedArray): # defaults values for numpy masked arrays
      fillValue = ma.default_fill_value(dtype)
      # if isinstance(dtype,np.bool_): fillValue = True
      # elif isinstance(dtype,np.integer): fillValue = 999999
      # elif isinstance(dtype,np.floating): fillValue = 1.e20
      # elif isinstance(dtype,np.complexfloating): fillValue = 1.e20+0j
      # elif isinstance(dtype,np.flexible): fillValue = 'N/A'
      # else: fillValue = None # for 'object'
    else: pass # if it is not a masked array and no missing value information was passed, don't assign fillValue 
  else:  
    if data is not None and isinstance(data,ma.MaskedArray): data.set_fill_value(fillValue)
  # make sure fillValue is OK (there have been problems...)    
  fillValue = checkFillValue(fillValue, dtype)
  if fillValue is not None:
    atts['missing_value'] = fillValue # I use fillValue and missing_value the same way
  # add extra dimension for strings
  if lstrvar and dtype.itemsize > 1:
    # add extra dimension
    shape = shape + (dtype.itemsize,)
    dims = dims + ('str_dim_'+name,) # naming pattern for string dimensions
    dst.createDimension(dims[-1], size=shape[-1])
    # change dtype to single char string  
    dtype = np.dtype('|S1')
    # convert string arrays to char arrays
    if data is not None: 
      data = nc.stringtochar(data)
      assert data.dtype == dtype, str(data.dtype)+', '+str(dtype)    
  # create netcdf variable  
  var = dst.createVariable(name, dtype, dims, fill_value=fillValue, **varargs)
  # add attributes
  if atts: var.setncatts(coerceAtts(atts))
  # assign coordinate data if given
  if data is not None: var[:] = data   
  # return var reference
  return var
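
# A usage sketch for add_var. It is not fully self-contained: it assumes
# the module-level helpers referenced above (zlib_default, checkFillValue,
# coerceAtts and the NC*Error classes) are importable alongside add_var;
# the file name and values are illustrative.
import numpy as np
import netCDF4 as nc

dst = nc.Dataset('example_out.nc', 'w')
temp = np.random.rand(4, 3).astype('f4')
# the 'time' and 'station' dimensions are created on the fly from data.shape
var = add_var(dst, 'temperature', ('time', 'station'), data=temp, zlib=False,
              atts={'units': 'K', 'long_name': 'air temperature'})
dst.close()
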
    def writeToNetCDF(self, inputFilePath, outputFilePath, commandLine, format, flatten=False, _debug=True):
        # weird, but useful to check whether the HeaderInfo is in the netCDF
        # file
        setattr(self, "header_info", None)
        netCDFHandler = _file_existence_check(outputFilePath, format, self)
        delattr(self, "header_info")

        #### Replace the original isdigit function
        camera_opt = "SWIR"

        ##### Write the data from metadata to netCDF #####
        for key, data in self.__dict__.items():
            tempGroup = netCDFHandler.createGroup(key) if not flatten else netCDFHandler
            for subkey, subdata in data.items():
                if not _IS_DIGIT(subdata):  # case for string-valued variables
                    if 'date' in subkey and subkey != "date of installation" and subkey != "date of handover":
                        assert subdata != "todo", '"todo" is not a legal value for the keys'

                        tempVariable = tempGroup.createVariable(_reformat_string(subkey), 'f8')
                        tempVariable[...] = translate_time(subdata)
                        setattr(tempVariable, "units",     "days since 1970-01-01 00:00:00")
                        setattr(tempVariable, "calender", "gregorian")

                    setattr(tempGroup, _reformat_string(subkey), subdata)

                else:  # case for numeric variables
                    if "time" in data:
                        yearMonthDate = data["time"]
                    elif "Time" in data:
                        yearMonthDate = data["Time"]
                    setattr(tempGroup, _reformat_string(subkey), subdata)

                    short_name, attributes = _generate_attr(subkey)
                    tempVariable = tempGroup.createVariable(short_name, 'f8')
                    for name, value in attributes.items():
                        setattr(tempVariable, name, value)
                    tempVariable[...] = float(subdata)

        ##### Write data from header files to netCDF #####
        wavelength = get_wavelength(inputFilePath)
        netCDFHandler.createDimension("wavelength", len(wavelength))

        # Check if the wavelength is correctly collected
        assert len(wavelength) in (955, 272), "ERROR: Failed to get wavelength information. Please check if you modified the *.hdr files"

        camera_opt = 'VNIR' if len(wavelength) == 955 else 'SWIR' # Choose appropriate camera by counting the number of wavelengths.

        tempWavelength = netCDFHandler.createVariable(
            "wavelength", 'f8', 'wavelength')
        setattr(tempWavelength, 'long_name', 'Hyperspectral Wavelength')
        setattr(tempWavelength, 'units', 'nanometers')
        tempWavelength[...] = wavelength
        write_header_file(inputFilePath, netCDFHandler, flatten, _debug)

        ##### Write the data from frameIndex files to netCDF #####
        tempFrameTime = frame_index_parser(''.join((inputFilePath.strip("raw"), "frameIndex.txt")), yearMonthDate)
        netCDFHandler.createDimension("time", len(tempFrameTime))

        # Check if the frame time information is correctly collected
        assert len(tempFrameTime), "ERROR: Failed to collect frame time information from " + ''.join((inputFilePath.strip("raw"), "frameIndex.txt")) + ". Please check the file."
       
        frameTime    = netCDFHandler.createVariable("frametime", "f8", ("time",))
        frameTime[...] = tempFrameTime
        setattr(frameTime, "units",    "days since 1970-01-01 00:00:00")
        setattr(frameTime, "calender", "gregorian")
        setattr(frameTime, "notes",    "Each time of the scanline of the y taken")

        ########################### Adding geographic positions ###########################

        xPixelsLocation, yPixelsLocation, boundingBox, googleMapAddress\
         = pixel2Geographic("".join((inputFilePath[:-4],"_metadata.json")), "".join((inputFilePath,'.hdr')), camera_opt)

        # Check if the image width and height are correctly collected.
        assert len(xPixelsLocation) > 0 and len(yPixelsLocation) > 0, "ERROR: Failed to collect the image size metadata from " + "".join((inputFilePath,'.hdr')) + ". Please check the file."
        
        netCDFHandler.createDimension("x", len(xPixelsLocation))
        x    = netCDFHandler.createVariable("x", "f8", ("x",))
        x[...] = xPixelsLocation
        setattr(netCDFHandler.variables["x"], "units", "meters")
        setattr(netCDFHandler.variables['x'], 'reference_point', 'Southeast corner of field')
        setattr(netCDFHandler.variables['x'], "long_name", "North-south offset from southeast corner of field")

        netCDFHandler.createDimension("y", len(yPixelsLocation))
        y    = netCDFHandler.createVariable("y", "f8", ("y",))
        y[...] = yPixelsLocation
        setattr(netCDFHandler.variables["y"], "units", "meters")
        setattr(netCDFHandler.variables['y'], 'reference_point', 'Southeast corner of field')
        setattr(netCDFHandler.variables['y'], "long_name", "Distance west of the southeast corner of the field")

        lat_pt, lon_pt = REFERENCE_POINT

        lat_pt_var = netCDFHandler.createVariable("lat_reference_point", "f8")
        lat_pt_var[...] = lat_pt
        setattr(netCDFHandler.variables["lat_reference_point"], "units", "degrees_north")
        setattr(netCDFHandler.variables["lat_reference_point"], "long_name", "Latitude of the master reference point at southeast corner of field")
        setattr(netCDFHandler.variables["lat_reference_point"], "provenance", "https://github.com/terraref/reference-data/issues/32 by Dr. David LeBauer")

        lon_pt_var = netCDFHandler.createVariable("lon_reference_point", "f8")
        lon_pt_var[...] = lon_pt
        setattr(netCDFHandler.variables["lon_reference_point"], "units", "degrees_east")
        setattr(netCDFHandler.variables["lon_reference_point"], "long_name", "Longitude of the master reference point at southeast corner of field")
        setattr(netCDFHandler.variables["lon_reference_point"], "provenance", "https://github.com/terraref/reference-data/issues/32 by Dr. David LeBauer")

        x_ref_pt = netCDFHandler.createVariable("x_reference_point", "f8")
        x_ref_pt[...] = 0
        setattr(netCDFHandler.variables["x_reference_point"], "units", "meters")
        setattr(netCDFHandler.variables["x_reference_point"], "long_name", "x of the master reference point at southeast corner of field")
        setattr(netCDFHandler.variables["x_reference_point"], "provenance", "https://github.com/terraref/reference-data/issues/32 by Dr. David LeBauer")

        y_ref_pt = netCDFHandler.createVariable("y_reference_point", "f8")
        y_ref_pt[...] = 0
        setattr(netCDFHandler.variables["y_reference_point"], "units", "meters")
        setattr(netCDFHandler.variables["y_reference_point"], "long_name", "y of the master reference point at southeast corner of field")
        setattr(netCDFHandler.variables["y_reference_point"], "provenance", "https://github.com/terraref/reference-data/issues/32 by Dr. David LeBauer")

        # Write latitude and longitude of bounding box
        SE, SW, NE, NW = boundingBox[0], boundingBox[1], boundingBox[2], boundingBox[3]
        lat_se, lon_se = tuple(SE.split(", "))
        lat_sw, lon_sw = tuple(SW.split(", "))
        lat_ne, lon_ne = tuple(NE.split(", "))
        lat_nw, lon_nw = tuple(NW.split(", "))

        latSe = netCDFHandler.createVariable("lat_img_se", "f8")
        latSe[...] = float(lat_se)
        setattr(netCDFHandler.variables["lat_img_se"], "units", "degrees_north")
        setattr(netCDFHandler.variables["lat_img_se"], "long_name", "Latitude of southeast corner of image")

        # have a "x_y_img_se" in meters, double
        lonSe = netCDFHandler.createVariable("lon_img_se", "f8")
        lonSe[...] = float(lon_se)
        setattr(netCDFHandler.variables["lon_img_se"], "units", "degrees_east")
        setattr(netCDFHandler.variables["lon_img_se"], "long_name", "Longitude of southeast corner of image")

        latSw = netCDFHandler.createVariable("lat_img_sw", "f8")
        latSw[...] = float(lat_sw)
        setattr(netCDFHandler.variables["lat_img_sw"], "units", "degrees_north")
        setattr(netCDFHandler.variables["lat_img_sw"], "long_name", "Latitude of southwest corner of image")

        lonSw = netCDFHandler.createVariable("lon_img_sw", "f8")
        lonSw[...] = float(lon_sw)
        setattr(netCDFHandler.variables["lon_img_sw"], "units", "degrees_east")
        setattr(netCDFHandler.variables["lon_img_sw"], "long_name", "Longitude of southwest corner of image")

        latNe = netCDFHandler.createVariable("lat_img_ne", "f8")
        latNe[...] = float(lat_ne)
        setattr(netCDFHandler.variables["lat_img_ne"], "units", "degrees_north")
        setattr(netCDFHandler.variables["lat_img_ne"], "long_name", "Latitude of northeast corner of image")

        lonNe = netCDFHandler.createVariable("lon_img_ne", "f8")
        lonNe[...] = float(lon_ne)
        setattr(netCDFHandler.variables["lon_img_ne"], "units", "degrees_east")
        setattr(netCDFHandler.variables["lon_img_ne"], "long_name", "Longitude of northeast corner of image")

        latNw = netCDFHandler.createVariable("lat_img_nw", "f8")
        latNw[...] = float(lat_nw)
        setattr(netCDFHandler.variables["lat_img_nw"], "units", "degrees_north")
        setattr(netCDFHandler.variables["lat_img_nw"], "long_name", "Latitude of northwest corner of image")

        lonNw = netCDFHandler.createVariable("lon_img_nw", "f8")
        lonNw[...] = float(lon_nw)
        setattr(netCDFHandler.variables["lon_img_nw"], "units", "degrees_east")
        setattr(netCDFHandler.variables["lon_img_nw"], "long_name", "Longitude of northwest corner of image")

        xSe = netCDFHandler.createVariable("x_img_se", "f8")
        xSe[...] = float(x[-1])
        setattr(netCDFHandler.variables["x_img_se"], "units", "meters")
        setattr(netCDFHandler.variables["x_img_se"], "long_name", "Southeast corner of image, north distance to reference point")

        # have a "x_y_img_se" in meters, double
        ySe = netCDFHandler.createVariable("y_img_se", "f8")
        ySe[...] = float(y[-1])
        setattr(netCDFHandler.variables["y_img_se"], "units", "meters")
        setattr(netCDFHandler.variables["y_img_se"], "long_name", "Southeast corner of image, west distance to reference point")

        xSw = netCDFHandler.createVariable("x_img_sw", "f8")
        xSw[...] = float(x[0])
        setattr(netCDFHandler.variables["x_img_sw"], "units", "meters")
        setattr(netCDFHandler.variables["x_img_sw"], "long_name", "Southwest corner of image, north distance to reference point")

        ySw = netCDFHandler.createVariable("y_img_sw", "f8")
        ySw[...] = float(y[-1])
        setattr(netCDFHandler.variables["y_img_sw"], "units", "meters")
        setattr(netCDFHandler.variables["y_img_sw"], "long_name", "Southwest corner of image, west distance to reference point")

        xNe = netCDFHandler.createVariable("x_img_ne", "f8")
        xNe[...] = float(x[-1])
        setattr(netCDFHandler.variables["x_img_ne"], "units", "meters")
        setattr(netCDFHandler.variables["x_img_ne"], "long_name", "Northeast corner of image, north distance to reference point")

        yNe = netCDFHandler.createVariable("y_img_ne", "f8")
        yNe[...] = float(y[0])
        setattr(netCDFHandler.variables["y_img_ne"], "units", "meters")
        setattr(netCDFHandler.variables["y_img_ne"], "long_name", "Northeast corner of image, west distance to reference point")

        xNw = netCDFHandler.createVariable("x_img_nw", "f8")
        xNw[...] = float(x[0])
        setattr(netCDFHandler.variables["x_img_nw"], "units", "meters")
        setattr(netCDFHandler.variables["x_img_nw"], "long_name", "Northwest corner of image, north distance to reference point")

        yNw = netCDFHandler.createVariable("y_img_nw", "f8")
        yNw[...] = float(y[0])
        setattr(netCDFHandler.variables["y_img_nw"], "units", "meters")
        setattr(netCDFHandler.variables["y_img_nw"], "long_name", "Northwest corner of image, west distance to reference point")
        
        if format == "NETCDF3_CLASSIC":
            netCDFHandler.createDimension("length of Google Map String", len(googleMapAddress))
            googleMapView = netCDFHandler.createVariable("Google_Map_View", "S1", ("length of Google Map String",))
            tempAddress = np.chararray((1, 1), itemsize=len(googleMapAddress))
            tempAddress[:] = googleMapAddress
            googleMapView[...] = stringtochar(tempAddress)[0]
        else:
            googleMapView = netCDFHandler.createVariable("Google_Map_View", str)
            googleMapView[...] = googleMapAddress

        setattr(netCDFHandler.variables["Google_Map_View"], "usage", "copy and paste to your web browser")
        setattr(netCDFHandler.variables["Google_Map_View"], 'reference_point', 'Southeast corner of field')

        y_pxl_sz = netCDFHandler.createVariable("y_pxl_sz", "f8")
        y_pxl_sz[...] = 0.98526434004512529576754637665e-3
        setattr(netCDFHandler.variables["y_pxl_sz"], "units", "meters")
        setattr(netCDFHandler.variables["y_pxl_sz"], "notes", "y coordinate length of a single pixel in pictures captured by SWIR and VNIR camera")

        if camera_opt == "SWIR":
            x_pxl_sz = netCDFHandler.createVariable("x_pxl_sz", "f8")
            x_pxl_sz[...] = 1.025e-3
            setattr(netCDFHandler.variables["x_pxl_sz"], "units", "meters")
            setattr(netCDFHandler.variables["x_pxl_sz"], "notes", "x coordinate length of a single pixel in SWIR images")
        else:
            x_pxl_sz = netCDFHandler.createVariable("x_pxl_sz", "f8")
            x_pxl_sz[...] = 1.930615052e-3
            setattr(netCDFHandler.variables["x_pxl_sz"], "units", "meters")
            setattr(netCDFHandler.variables["x_pxl_sz"], "notes", "x coordinate length of a single pixel in VNIR images")

        ##### Write the history to netCDF #####
        netCDFHandler.history = ''.join((_TIMESTAMP(), ': python ', commandLine))

        netCDFHandler.close()
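
# The format branch above shows the two ways to store a string in netCDF:
# NETCDF3 has no variable-length string type, so the text must be written
# as an 'S1' char array over an explicit length dimension, while NETCDF4
# can hold it directly in a str (vlen) scalar variable. A standalone sketch
# of both (file names and the address string are illustrative):
import numpy as np
from netCDF4 import Dataset, stringtochar

text = 'http://maps.example.com/view'

nc3 = Dataset('classic.nc', 'w', format='NETCDF3_CLASSIC')
nc3.createDimension('strlen', len(text))
v3 = nc3.createVariable('address', 'S1', ('strlen',))
v3[:] = stringtochar(np.array([text], 'S%d' % len(text)))[0]
nc3.close()

nc4 = Dataset('modern.nc', 'w', format='NETCDF4')
v4 = nc4.createVariable('address', str)
v4[...] = text
nc4.close()
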
Example #53
0
FILE_FORMAT = 'NETCDF4_CLASSIC'
chars = '1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'

nc = Dataset(FILE_NAME,'w',format=FILE_FORMAT)
n2 = 10; nchar = 12; nrecs = 4
nc.createDimension('n1',None)
nc.createDimension('n2',n2)
nc.createDimension('nchar',nchar)
v = nc.createVariable('strings','S1',('n1','n2','nchar'))
for nrec in range(nrecs):
    data = []
    data = numpy.empty((n2,),'S'+repr(nchar))
    # fill data with random nchar character strings
    for n in range(n2):
        data[n] = ''.join([random.choice(chars) for i in range(nchar)])
    print nrec,data
    # convert data to array of characters with an extra dimension 
    # (the number of characters per string) added to the right.
    datac = stringtochar(data)
    v[nrec] = datac
nc.close()

nc = Dataset(FILE_NAME)
v = nc.variables['strings']
print v.shape, v.dtype
for nrec in range(nrecs):
    # read character array back, convert to an array of strings
    # of length equal to the rightmost dimension.
    print nrec, chartostring(v[nrec])
nc.close()
Example #54
0
    def write_restart(self, current_history_files, history_restart_files):

        # ------------------------------------------------------------ #
        # Open file
        filename = self.timestamp.strftime(self.__fname_format)
        f = Dataset(filename, 'w', self.file_format)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Time Variables

        # Current time
        time = f.createDimension('time', 1)
        time = f.createVariable('time', NC_DOUBLE, ('time',))
        time[:] = date2num(self.timestamp, TIMEUNITS, calendar=self._calendar)

        for key, val in share.time.__dict__.iteritems():
            if val:
                setattr(time, key, val)
        time.calendar = self._calendar

        # Timesteps
        timesteps = f.createDimension('timesteps', self.full_time_length)
        timesteps = f.createVariable('timesteps', NC_DOUBLE, ('timesteps',))
        timesteps[:] = np.arange(self.full_time_length)

        for key, val in share.timesteps.__dict__.iteritems():
            if val:
                setattr(timesteps, key, val)
        timesteps.timestep_length = 'unit_hydrograph_dt'

        # UH timestep
        unit_hydrograph_dt = f.createVariable('unit_hydrograph_dt',
                                              NC_DOUBLE, ())
        unit_hydrograph_dt[:] = self.unit_hydrograph_dt
        for key, val in share.unit_hydrograph_dt.__dict__.iteritems():
            if val:
                setattr(unit_hydrograph_dt, key, val)

        timemgr_rst_type = f.createVariable('timemgr_rst_type', NC_DOUBLE, ())
        timemgr_rst_type[:] = self._calendar_key
        for key, val in share.timemgr_rst_type.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_type, key, val)

        timemgr_rst_step_sec = f.createVariable('timemgr_rst_step_sec',
                                                NC_DOUBLE, ())
        timemgr_rst_step_sec[:] = self.unit_hydrograph_dt
        for key, val in share.timemgr_rst_step_sec.__dict__.iteritems():
            if val:
                setattr(timemgr_rst_step_sec, key, val)

        timemgr_rst_start_ymd = f.createVariable('timemgr_rst_start_ymd',
                                                 NC_DOUBLE, ())
        timemgr_rst_start_ymd[:] = self._start_date.year*10000 \
            + self._start_date.month*100 + self._start_date.day
        for key, val in share.timemgr_rst_start_ymd.__dict__.items():
            if val:
                setattr(timemgr_rst_start_ymd, key, val)

        timemgr_rst_start_tod = f.createVariable('timemgr_rst_start_tod',
                                                 NC_DOUBLE, ())
        timemgr_rst_start_tod[:] = (self._start_ord % 1) * SECSPERDAY
        for key, val in share.timemgr_rst_start_tod.__dict__.items():
            if val:
                setattr(timemgr_rst_start_tod, key, val)

        timemgr_rst_ref_ymd = f.createVariable('timemgr_rst_ref_ymd',
                                               NC_DOUBLE, ())
        timemgr_rst_ref_ymd[:] = REFERENCE_DATE
        for key, val in share.timemgr_rst_ref_ymd.__dict__.items():
            if val:
                setattr(timemgr_rst_ref_ymd, key, val)

        timemgr_rst_ref_tod = f.createVariable('timemgr_rst_ref_tod',
                                               NC_DOUBLE, ())
        timemgr_rst_ref_tod[:] = REFERENCE_TIME
        for key, val in share.timemgr_rst_ref_tod.__dict__.items():
            if val:
                setattr(timemgr_rst_ref_tod, key, val)

        timemgr_rst_curr_ymd = f.createVariable('timemgr_rst_curr_ymd',
                                                NC_DOUBLE, ())
        timemgr_rst_curr_ymd[:] = self.timestamp.year*10000 + \
            self.timestamp.month*100+self.timestamp.day
        for key, val in share.timemgr_rst_curr_ymd.__dict__.items():
            if val:
                setattr(timemgr_rst_curr_ymd, key, val)

        timemgr_rst_curr_tod = f.createVariable('timemgr_rst_curr_tod',
                                                NC_DOUBLE, ())
        timemgr_rst_curr_tod[:] = (self.time_ord % 1)*SECSPERDAY
        for key, val in share.timemgr_rst_curr_tod.__dict__.items():
            if val:
                setattr(timemgr_rst_curr_tod, key, val)

        # ------------------------------------------------------------ #
        # Setup Tape Dimensions
        coords = ('tapes', 'max_chars')
        ntapes = f.createDimension(coords[0], len(history_restart_files))
        ntapes = f.createDimension(coords[1], MAX_NC_CHARS)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Write Fields
        locfnh = f.createVariable('locfnh', NC_CHAR, coords)
        for i, string in enumerate(current_history_files):
            locfnh[i, :] = stringtochar(np.array(
                string.ljust(MAX_NC_CHARS), dtype='S{0}'.format(MAX_NC_CHARS)))
        locfnh.long_name = 'History filename'
        locfnh.comment = 'This variable NOT needed for startup or branch simulations'

        locfnhr = f.createVariable('locfnhr', NC_CHAR, coords)
        for i, string in enumerate(history_restart_files):
            locfnhr[i, :] = stringtochar(np.array(
                string.ljust(MAX_NC_CHARS), dtype='S{0}'.format(MAX_NC_CHARS)))
        locfnhr.long_name = 'History restart filename'
        locfnhr.comment = 'This variable NOT needed for startup or branch simulations'
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Setup Point Dimensions
        coords = ('outlets', )

        outlets = f.createDimension(coords[0], self.n_outlets)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # Write Fields
        oyi = f.createVariable('outlet_y_ind', NC_INT, coords[0])
        oyi[:] = self.outlet_y_ind
        for key, val in share.outlet_y_ind.__dict__.items():
            if val:
                setattr(oyi, key, val)

        oxi = f.createVariable('outlet_x_ind', NC_INT, coords[0])
        oxi[:] = self.outlet_x_ind
        for key, val in share.outlet_x_ind.__dict__.items():
            if val:
                setattr(oxi, key, val)

        odi = f.createVariable('outlet_decomp_ind', NC_INT, coords[0])
        odi[:] = self.outlet_decomp_ind
        for key, val in share.outlet_decomp_ind.__dict__.items():
            if val:
                setattr(odi, key, val)

        tcoords = ('timesteps', ) + coords

        for tracer in RVIC_TRACERS:
            ring = f.createVariable('{0}_ring'.format(tracer),
                                    NC_DOUBLE, tcoords)
            ring[:, :] = self.ring[tracer][:, :]

            for key, val in share.ring.__dict__.items():
                if val:
                    setattr(ring, key, val)
        # ------------------------------------------------------------ #

        # ------------------------------------------------------------ #
        # write global attributes
        self.glob_atts.update()

        for key, val in self.glob_atts.atts.items():
            if val:
                setattr(f, key, val)
        # ------------------------------------------------------------ #

        f.close()
        log.info('Finished writing %s', filename)

        return filename
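
The locfnh/locfnhr blocks above store variable-length filenames in a fixed-width NC_CHAR variable by left-justifying each name to the maximum width before converting it to characters. A standalone sketch of that pattern; MAX_NC_CHARS, the filenames, and the output path are assumed values here:

import numpy as np
from netCDF4 import Dataset, stringtochar, chartostring

MAX_NC_CHARS = 256  # assumed width; the real constant is defined elsewhere in RVIC

f = Dataset('restart_demo.nc', 'w')  # placeholder filename
f.createDimension('tapes', 2)
f.createDimension('max_chars', MAX_NC_CHARS)
locfnh = f.createVariable('locfnh', 'S1', ('tapes', 'max_chars'))

for i, name in enumerate(['hist.tape1.nc', 'hist.tape2.nc']):
    padded = np.array(name.ljust(MAX_NC_CHARS), dtype='S{0}'.format(MAX_NC_CHARS))
    locfnh[i, :] = stringtochar(padded)

print(chartostring(locfnh[:]))  # the padded names read back as strings
f.close()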
Example #55
0
nc_file_name_var.setncattr("long_name", "source file for this instrument")

nc_instrument_type_var = nc_out.createVariable("instrument_type", "S1", ("instrument", "strlen"))
nc_instrument_type_var.setncattr("long_name", "source instrument make, model, serial_number")

filen = 0
data = numpy.empty(len(files), dtype="S256")
instrument = numpy.empty(len(files), dtype="S256")
for path_file in files:
    data[filen] = path_file
    with Dataset(path_file, mode="r") as nc_type:
        instrument[filen] = nc_type.instrument + '-' + nc_type.instrument_serial_number

    filen += 1

nc_file_name_var[:] = stringtochar(data)
nc_instrument_type_var[:] = stringtochar(instrument)

#
# create a list of variables needed
#

filen = 0

# include the DEPTH variable
var_names_all = var_to_agg + ['DEPTH']

# add the ancillary variables for the ones requested
for v in var_names_all:
    if hasattr(var_list[v], 'ancillary_variables'):
        var_names_all += [var_list[v].ancillary_variables]
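
Two behaviors are worth flagging in the loop above: Python's for statement does see items appended to the list it is iterating over, so ancillary variables of ancillary variables are picked up; but the CF ancillary_variables attribute is a blank-separated list, so appending it as a single string can add one entry that names several variables. A hedged variant (a sketch, not the original aggregation code, reusing var_to_agg and var_list from the surrounding script) that splits the attribute and de-duplicates:

var_names_all = var_to_agg + ['DEPTH']
seen = []
queue = list(var_names_all)
while queue:
    v = queue.pop(0)
    if v in seen:
        continue
    seen.append(v)
    # follow the blank-separated CF ancillary_variables attribute, if present
    anc = getattr(var_list[v], 'ancillary_variables', '') if v in var_list else ''
    queue.extend(anc.split())
var_names_all = seen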
Example #56
0
def write_param_file(
    file_name,
    nc_format="NETCDF3_CLASSIC",
    glob_atts=NcGlobals(),
    full_time_length=None,
    subset_length=None,
    unit_hydrograph_dt=None,
    outlet_lon=None,
    outlet_lat=None,
    outlet_x_ind=None,
    outlet_y_ind=None,
    outlet_decomp_ind=None,
    outlet_number=None,
    outlet_mask=None,
    outlet_name=None,
    outlet_upstream_gridcells=None,
    outlet_upstream_area=None,
    source_lon=None,
    source_lat=None,
    source_x_ind=None,
    source_y_ind=None,
    source_decomp_ind=None,
    source_time_offset=None,
    source2outlet_ind=None,
    unit_hydrograph=None,
    zlib=True,
    complevel=4,
    least_significant_digit=None,
):

    """Write a standard RVIC Parameter file """

    # ---------------------------------------------------------------- #
    # netCDF variable options
    ncvaropts = {"zlib": zlib, "complevel": complevel, "least_significant_digit": least_significant_digit}
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Open file
    f = Dataset(file_name, "w", format=nc_format)
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Time Variables

    # Timesteps
    timesteps = f.createDimension("timesteps", subset_length)
    timesteps = f.createVariable("timesteps", NC_DOUBLE, ("timesteps",), **ncvaropts)
    timesteps[:] = np.arange(subset_length)
    for key, val in iteritems(share.timesteps):
        if val:
            setattr(timesteps, key, val.encode())
    timesteps.timestep_length = b"unit_hydrograph_dt"

    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # write global attributes
    glob_atts.update()
    for key, val in iteritems(glob_atts.atts):
        if val:
            setattr(f, key, val.encode())
    f.featureType = b"timeSeries"
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 0-D variables

    # Full time length (size of ring)
    ftl = f.createVariable("full_time_length", NC_INT, (), **ncvaropts)
    ftl[:] = full_time_length
    for key, val in iteritems(share.full_time_length):
        if val:
            setattr(ftl, key, val.encode())

    # Subset Length
    sl = f.createVariable("subset_length", NC_INT, (), **ncvaropts)
    sl[:] = subset_length
    for key, val in iteritems(share.subset_length):
        if val:
            setattr(sl, key, val.encode())

    # UH timestep
    uh_dt = f.createVariable("unit_hydrograph_dt", NC_DOUBLE, (), **ncvaropts)
    uh_dt[:] = unit_hydrograph_dt
    for key, val in iteritems(share.unit_hydrograph_dt):
        if val:
            setattr(uh_dt, key, val.encode())
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Outlet Dimensions
    if outlet_y_ind.ndim == 0:
        numoutlets = 1
        outlet_name = np.array([outlet_name])
    else:
        numoutlets = len(outlet_lon)
    ocoords = ("outlets",)
    f.createDimension(ocoords[0], numoutlets)

    nocoords = ocoords + ("nc_chars",)
    char_names = stringtochar(outlet_name)
    f.createDimension(nocoords[1], char_names.shape[1])
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 1-D Outlet Variables

    # Outlet Cell Longitudes
    olon = f.createVariable("outlet_lon", NC_DOUBLE, ocoords, **ncvaropts)
    olon[:] = outlet_lon
    for key, val in iteritems(share.outlet_lon):
        if val:
            setattr(olon, key, val.encode())

    # Outlet Cell Latitudes
    olat = f.createVariable("outlet_lat", NC_DOUBLE, ocoords, **ncvaropts)
    olat[:] = outlet_lat
    for key, val in iteritems(share.outlet_lat):
        if val:
            setattr(olat, key, val.encode())

    # Outlet Cell X Indices
    oxi = f.createVariable("outlet_x_ind", NC_INT, ocoords, **ncvaropts)
    oxi[:] = outlet_x_ind
    for key, val in iteritems(share.outlet_x_ind):
        if val:
            setattr(oxi, key, val.encode())

    # Outlet Cell Y Indices
    oyi = f.createVariable("outlet_y_ind", NC_INT, ocoords, **ncvaropts)
    oyi[:] = outlet_y_ind
    for key, val in iteritems(share.outlet_y_ind):
        if val:
            setattr(oyi, key, val.encode())

    # Outlet Cell Decomp IDs
    odi = f.createVariable("outlet_decomp_ind", NC_INT, ocoords, **ncvaropts)
    odi[:] = outlet_decomp_ind
    for key, val in iteritems(share.outlet_decomp_ind):
        if val:
            setattr(odi, key, val.encode())

    # Outlet Cell Number
    on = f.createVariable("outlet_number", NC_INT, ocoords, **ncvaropts)
    on[:] = outlet_number
    for key, val in iteritems(share.outlet_number):
        if val:
            setattr(on, key, val.encode())

    # Outlet Mask
    om = f.createVariable("outlet_mask", NC_INT, ocoords, **ncvaropts)
    om[:] = outlet_mask
    for key, val in iteritems(share.outlet_mask):
        if val:
            setattr(om, key, val.encode())

    # Outlet Upstream area
    oua = f.createVariable("outlet_upstream_area", NC_DOUBLE, ocoords, **ncvaropts)
    oua[:] = outlet_upstream_area
    for key, val in iteritems(share.outlet_upstream_area):
        if val:
            setattr(oua, key, val.encode())

    # Outlet Upstream grid cells
    oug = f.createVariable("outlet_upstream_gridcells", NC_INT, ocoords, **ncvaropts)
    oug[:] = outlet_upstream_gridcells
    for key, val in iteritems(share.outlet_upstream_gridcells):
        if val:
            setattr(oug, key, val.encode())

    # Outlet Names
    onm = f.createVariable("outlet_name", NC_CHAR, nocoords)
    onm[:, :] = char_names
    for key, val in iteritems(share.outlet_name):
        if val:
            setattr(onm, key, val.encode())
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # Source Dimension
    scoords = ("sources",)
    f.createDimension(scoords[0], len(source_lon))
    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 1D Source Variables

    # Source Cell Longitudes
    slon = f.createVariable("source_lon", NC_DOUBLE, scoords, **ncvaropts)
    slon[:] = source_lon
    for key, val in iteritems(share.source_lon):
        if val:
            setattr(slon, key, val.encode())

    # Source Cell Latitudes
    slat = f.createVariable("source_lat", NC_DOUBLE, scoords, **ncvaropts)
    slat[:] = source_lat
    for key, val in iteritems(share.source_lat):
        if val:
            setattr(slat, key, val.encode())

    # Source Cell X Indices
    sxi = f.createVariable("source_x_ind", NC_INT, scoords, **ncvaropts)
    sxi[:] = source_x_ind
    for key, val in iteritems(share.source_x_ind):
        if val:
            setattr(sxi, key, val.encode())

    # Source Cell Y Indices
    syi = f.createVariable("source_y_ind", NC_INT, scoords, **ncvaropts)
    syi[:] = source_y_ind
    for key, val in iteritems(share.source_y_ind):
        if val:
            setattr(syi, key, val.encode())

    # Source Cell Decomp IDs
    sdi = f.createVariable("source_decomp_ind", NC_INT, scoords, **ncvaropts)
    sdi[:] = source_decomp_ind
    for key, val in iteritems(share.source_decomp_ind):
        if val:
            setattr(sdi, key, val.encode())

    # Source Cell Time Offset
    sto = f.createVariable("source_time_offset", NC_INT, scoords, **ncvaropts)
    sto[:] = source_time_offset
    for key, val in iteritems(share.source_time_offset):
        if val:
            setattr(sto, key, val.encode())

    # Source to Outlet Index Mapping
    s2o = f.createVariable("source2outlet_ind", NC_INT, scoords, **ncvaropts)
    s2o[:] = source2outlet_ind
    for key, val in iteritems(share.source2outlet_ind):
        if val:
            setattr(s2o, key, val.encode())

    # ---------------------------------------------------------------- #

    # ---------------------------------------------------------------- #
    # 3-D Source Variables
    uhcords = ("timesteps",) + scoords + ("tracers",)
    f.createDimension(uhcords[2], 1)

    # Unit Hydrographs
    uhs = f.createVariable("unit_hydrograph", NC_DOUBLE, uhcords, **ncvaropts)
    uhs[:, :] = unit_hydrograph
    for key, val in iteritems(share.unit_hydrograph):
        if val:
            setattr(uhs, key, val.encode())
    # ---------------------------------------------------------------- #

    f.close()

    log.info("Finished writing %s", file_name)