Example #1
def read_netCDF_var(fname, vname, oneD=False, ftime=-1, flevel=-1):
    '''
    Read a variable from a netCDF file
    '''
    try:
        nc = _Dataset(fname, 'r')
    except IOError:
        raise IOError('Unable to open %s' % fname)

    if (not variable_exist(fname, vname)):
        raise Exception('variable %s does not exist in %s' % (vname, fname))

    if (oneD):
        var = nc.variables[vname][:]
    else:
        if ((ftime == -1) and (flevel == -1)):
            var = nc.variables[vname][:, :]
        elif ((ftime == -1) and (flevel != -1)):
            var = nc.variables[vname][flevel, :, :]
        elif ((ftime != -1) and (flevel == -1)):
            var = nc.variables[vname][ftime, :, :]
        else:  # both ftime and flevel given
            var = nc.variables[vname][ftime, flevel, :, :]

    try:
        nc.close()
    except IOError:
        raise IOError('Unable to close %s' % fname)

    return _np.squeeze(var)
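A minimal usage sketch for the reader above; the file name, variable name, and time index are hypothetical placeholders:

# hypothetical file/variable names, for illustration only
t2m = read_netCDF_var('wrfout_d01.nc', 'T2', ftime=0)  # one time step of a (time, y, x) variable
print(t2m.shape)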
Example #2
def save_netCDF(ts, fname, leave_open=False):
    file_mode = 'w'
    ni = _Dataset(fname, file_mode)

    time_dim = ni.createDimension('time', ts.data.shape[0])
    dim_data_col = ni.createDimension('data_columns', ts.data.shape[1])

    ts_time_num = _date2num(ts.data.index.to_pydatetime(), unit_time)
    time_var = ni.createVariable('time', ts_time_num.dtype, 'time')
    time_var[:] = ts_time_num
    time_var.units = 'days since 1900-01-01'  # should agree with unit_time used above

    var_data = ni.createVariable('data', ts.data.values.dtype,
                                 ('time', 'data_columns'))
    var_data[:] = ts.data.values

    ts_columns = ts.data.columns.values.astype(str)
    var_data_columns = ni.createVariable('data_columns', ts_columns.dtype,
                                         'data_columns')
    var_data_columns[:] = ts_columns

    ni._data_period = none2nan(ts._data_period)
    ni._x_label = none2nan(ts._x_label)
    ni._y_label = none2nan(ts._y_label)
    ni.info = none2nan(ts.info)

    ni._atm_py_commit = _git_tools.current_commit()

    if leave_open:
        return ni
    else:
        ni.close()
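A round-trip sketch, assuming `TimeSeries` is the host module's DataFrame wrapper and `unit_time` is defined at module level (both assumptions here); the instance must also carry the `_data_period`, `_x_label`, `_y_label`, and `info` attributes the writer reads:

import numpy as np
import pandas as pd

# build a small DataFrame-backed TimeSeries and write it out
df = pd.DataFrame(np.random.rand(5, 2),
                  index=pd.date_range('2020-01-01', periods=5, freq='D'))
ts = TimeSeries(df)            # assumed constructor; see load_netCDF below
save_netCDF(ts, 'ts_demo.nc')  # 'ts_demo.nc' is a placeholder path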
Example #3
def create_topography_file(fpath, axs, elevation, options=None, **kwargs):
    """A helper to create a topography DEM file with NetCDF CF convention.

    The key of the elevation is fixed to `elevation` for convenience. By default, the spatial axes
    `x` and `y` use the EPSG 3857 coordinate system. All length units are meters (i.e., `m`).

    Arguments
    ---------
    fpath : str or PathLike
        The path to the file.
    axs : a list/tuple of nplike.ndarray
        The coordinates of the gridlines in x (west-east) and y (south-north) direction.
    elevation : nplike.ndarray
        The elevation data with shape (ny, nx)
    options : dict or None
        To overwrite the default attribute values of `x`, `y`, `elevation`, and `root`.
    **kwargs
        Keyword arguments sent to netCDF4.Dataset.
    """
    fpath = _Path(fpath).expanduser().resolve()
    _options = {"elevation": {"units": "m"}}
    _options.update({} if options is None else options)

    with _Dataset(fpath, "w", **kwargs) as dset:
        _write_to_dataset(dset,
                          axs, {"elevation": elevation},
                          options=_options)
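A short usage sketch with synthetic data; the file name and grid values are made up for illustration:

import numpy as np

# a synthetic 80x100 DEM on a regular grid (EPSG 3857-style coordinates assumed by default)
x = np.linspace(0., 1000., 100)
y = np.linspace(0., 800., 80)
elev = 10. * np.sin(x[None, :] / 200.) * np.cos(y[:, None] / 150.)  # shape (ny, nx)
create_topography_file('topo.nc', (x, y), elev)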
Example #4
def write_soln_to_file(fpath, soln, time, tidx, ngh=0, **kwargs):
    """Write a solution snapshot to an existing NetCDF file.

    Arguments
    ---------
    fpath : str or PathLike
        The path to the file.
    soln : torchswe.utils.data.WHUHVModel or torchswe.utils.data.HUVModel
        The States instance containing solutions.
    time : float
        The simulation time of this snapshot.
    tidx : int
        The index of the snapshot time in the temporal axis.
    ngh : int
        The number of ghost-cell layers outside each boundary.
    **kwargs
        Keyword arguments sent to netCDF4.Dataset.
    """
    fpath = _Path(fpath).expanduser().resolve()

    # determine if it's a WHUHVModel or HUVModel
    if hasattr(soln, "w"):
        keys = ["w", "hu", "hv"]
    else:
        keys = ["h", "u", "v"]

    if ngh == 0:
        data = {k: soln[k] for k in keys}
    else:
        slc = slice(ngh, -ngh)  # alias for convenience; non-ghost domain
        data = {k: soln[k][slc, slc] for k in keys}

    with _Dataset(fpath, "a", **kwargs) as dset:
        _add_time_data_to_dataset(dset, data, time, tidx)
Example #5
def create_soln_snapshot_file(fpath, grid, soln, **kwargs):
    """Create a NetCDF file with a single snapshot of solutions.

    Arguments
    ---------
    fpath : str or PathLike
        The path to the file.
    grid : torchswe.mpi.data.Gridlines
        The Gridlines instance corresponding to the solutions.
    soln : torchswe.utils.data.WHUHVModel or torchswe.utils.data.HUVModel
        The snapshot of the solution.
    **kwargs
        Keyword arguments sent to netCDF4.Dataset.
    """
    fpath = _Path(fpath).expanduser().resolve()

    try:
        data = {k: soln[k] for k in ["w", "hu", "hv"]}
        options = {"w": {"units": "m"}, "hu": {"units": "m2 s-1"}, "hv": {"units": "m2 s-1"}}
    except AttributeError as err:
        if "has no attribute \'w\'" in str(err):  # a HUVModel
            data = {k: soln[k] for k in ["h", "u", "v"]}
            options = {"h": {"units": "m"}, "u": {"units": "m s-1"}, "v": {"units": "m s-1"}}
        else:
            raise

    with _Dataset(fpath, "w", parallel=True, comm=grid.comm, **kwargs) as dset:
        _write_to_dataset(
            dset, [grid.x.cntr, grid.y.cntr], data, [grid.gnx, grid.gny],
            [grid.ibg, grid.ied, grid.jbg, grid.jed], (grid.gxbg, grid.gyed),
            ((grid.gxed-grid.gxbg)/grid.gnx, (grid.gyed-grid.gybg)/grid.gny), options)
        dset.sync()
Example #6
def create_empty_soln_file(fpath, grid, model="whuhv", **kwargs):
    """Create an new NetCDF file for solutions using the corresponding grid object.

    Create an empty NetCDF4 file with axes `x`, `y`, and `time`. `x` and `y` are defined at cell
    centers. The spatial coordinates use EPSG 3857. The temporal axis has a fixed length of
    `ntime`. It also adds empty solution variables `w`, `hu`, and `hv` to the dataset, filled
    with `NaN`. The shapes of these variables are `(ntime, ny, nx)`, and their units are "m",
    "m2 s-1", and "m2 s-1", respectively.

    Arguments
    ---------
    fpath : str or PathLike
        The path to the file.
    grid : torchswe.utils.data.Gridlines
        The Gridlines instance corresponding to the solutions.
    model : str, either "whuhv" or "huv"
        The type of solution model: the conservative form (w, hu, hv) or the non-conservative
        form (h, u, v).
    **kwargs
        Keyword arguments sent to netCDF4.Dataset.
    """
    fpath = _Path(fpath).expanduser().resolve()

    if model == "whuhv":
        data = {k: None for k in ["w", "hu", "hv"]}
        options = {
            "w": {
                "units": "m"
            },
            "hu": {
                "units": "m2 s-1"
            },
            "hv": {
                "units": "m2 s-1"
            }
        }
    elif model == "huv":
        data = {k: None for k in ["h", "u", "v"]}
        options = {
            "h": {
                "units": "m"
            },
            "u": {
                "units": "m s-1"
            },
            "v": {
                "units": "m s-1"
            }
        }

    with _Dataset(fpath, "w", **kwargs) as dset:
        _write_to_dataset(dset, [grid.x.cntr, grid.y.cntr, grid.t],
                          data,
                          corner=[grid.x.vert[0], grid.y.vert[-1]],
                          options=options)
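A hedged calling sketch with a hand-rolled stand-in for the Gridlines object; real torchswe.utils.data.Gridlines instances carry more metadata, so this is for illustration only:

from types import SimpleNamespace
import numpy as np

# minimal stand-in exposing only the attributes used above (an assumption)
x = SimpleNamespace(cntr=np.linspace(0.5, 9.5, 10), vert=np.linspace(0., 10., 11))
y = SimpleNamespace(cntr=np.linspace(0.5, 4.5, 5), vert=np.linspace(0., 5., 6))
grid = SimpleNamespace(x=x, y=y, t=np.linspace(0., 1., 3))
create_empty_soln_file('soln.nc', grid, model="whuhv")  # placeholder path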
Example #7
def save_netCDF(vp, fname, leave_open=False):

    file_mode = 'w'
    try:
        ni = _Dataset(fname, file_mode)
    except RuntimeError:
        # if writing fails (e.g., a stale or corrupt existing file), remove it and retry
        if _os.path.isfile(fname):
            _os.remove(fname)
            ni = _Dataset(fname, file_mode)
        else:
            raise

    altitude_dim = ni.createDimension('altitude', vp.data.shape[0])
    dim_data_col = ni.createDimension('data_columns', vp.data.shape[1])

    altitude = vp.data.index
    altitude_var = ni.createVariable('altitude', altitude.dtype, 'altitude')
    altitude_var[:] = altitude.values
    altitude_var.units = 'meters'

    var_data = ni.createVariable('data', vp.data.values.dtype,
                                 ('altitude', 'data_columns'))
    var_data[:] = vp.data.values

    vp_columns = vp.data.columns.values.astype(str)
    var_data_columns = ni.createVariable('data_columns', vp_columns.dtype,
                                         'data_columns')
    var_data_columns[:] = vp_columns

    ni._type = type(vp).__name__
    ni._x_label = none2nan(vp._x_label)
    ni._y_label = none2nan(vp._y_label)
    ni._atm_py_commit = _git_tools.current_commit()

    if leave_open:
        return ni
    else:
        ni.close()
Example #8
def create_soln_snapshot_file(fpath, grid, soln, **kwargs):
    """Create a NetCDF file with a single snapshot of solutions.

    Arguments
    ---------
    fpath : str or PathLike
        The path to the file.
    grid : torchswe.utils.data.Gridlines
        The Gridlines instance corresponding to the solutions.
    soln : torchswe.utils.data.WHUHVModel or torchswe.utils.data.HUVModel
        The snapshot of the solution.
    **kwargs
        Keyword arguments sent to netCDF4.Dataset.
    """
    fpath = _Path(fpath).expanduser().resolve()

    try:
        data = {k: soln[k] for k in ["w", "hu", "hv"]}
        options = {"w": {"units": "m"}, "hu": {"units": "m2 s-1"}, "hv": {"units": "m2 s-1"}}
    except AttributeError as err:
        if "has no attribute 'w'" in str(err):  # a HUVModel
            data = {k: soln[k] for k in ["h", "u", "v"]}
            options = {"h": {"units": "m"}, "u": {"units": "m s-1"}, "v": {"units": "m s-1"}}
        else:
            raise

    with _Dataset(fpath, "w", **kwargs) as dset:
        _write_to_dataset(dset, [grid.x.cntr, grid.y.cntr],
                          data,
                          corner=[grid.x.vert[0], grid.y.vert[-1]],
                          options=options)
Example #9
def load_netCDF(fname):

    ni = _Dataset(fname, 'r')

    # load time
    time_var = ni.variables['time']
    ts_time = _num2date(time_var[:], time_var.units)
    timestamp = _pd.DatetimeIndex(ts_time)

    # load data and column names
    var_data = ni.variables['data']
    var_data_col = ni.variables['data_columns']
    ts_data = _pd.DataFrame(var_data[:],
                            index=timestamp,
                            columns=var_data_col[:])

    # create time series
    ts_out = TimeSeries(ts_data)

    # load attributes and attach to time series
    for atr in ni.ncattrs():
        value = ni.getncattr(atr)
        # pandas does not like numpy scalar types -> convert to plain Python types
        if isinstance(value, str):
            pass
        # netCDF cannot store NoneType, so None was saved as np.nan; convert back first,
        # before the float branch below would swallow the nan
        elif _np.isnan(value):
            value = None
        elif 'float' in value.dtype.name:
            value = float(value)
        elif 'int' in value.dtype.name:
            value = int(value)

        setattr(ts_out, atr, value)

    ni.close()
    return ts_out
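A usage sketch pairing this loader with the writer from Example #2; the path is a placeholder:

ts = load_netCDF('ts_demo.nc')  # returns a TimeSeries
print(ts.data.head())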
Example #10
def variable_exist(fname, vname, debug=False):
    '''
    Check whether a variable exists in a netCDF file
    '''

    result = False

    try:
        nc = _Dataset(fname, 'r')
    except IOError:
        raise IOError('Unable to open %s' % fname)

    if vname in nc.variables:
        result = True

    try:
        nc.close()
    except IOError:
        raise IOError('Unable to close %s' % fname)

    return result
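A quick usage sketch combining this check with the reader from Example #1; file and variable names are hypothetical:

if variable_exist('wrfout_d01.nc', 'T2'):
    t2m = read_netCDF_var('wrfout_d01.nc', 'T2', ftime=0)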
Example #11
def write_soln_to_file(fpath, grid, soln, time, tidx, ngh=0, **kwargs):
    """Write a solution snapshot to an existing NetCDF file.

    Arguments
    ---------
    fpath : str or PathLike
        The path to the file.
    grid : torchswe.mpi.data.Gridlines
        A Gridlines instance describing the domain decomposition of this process.
    soln : torchswe.utils.data.WHUHVModel or torchswe.utils.data.HUVModel
        The States instance containing solutions.
    time : float
        The simulation time of this snapshot.
    tidx : int
        The index of the snapshot time in the temporal axis.
    ngh : int
        The number of ghost-cell layers outside each boundary. This may differ from the ngh in
        the `grid` object because some arrays (e.g., h, u, v) always have ngh = 0.
    **kwargs
        Keyword arguments sent to netCDF4.Dataset.
    """
    fpath = _Path(fpath).expanduser().resolve()

    # determine if it's a WHUHVModel or HUVModel
    if hasattr(soln, "w"):
        keys = ["w", "hu", "hv"]
    else:
        keys = ["h", "u", "v"]

    if ngh == 0:
        data = {k: soln[k] for k in keys}
    else:
        slc = slice(ngh, -ngh)  # alias for convenience; non-ghost domain
        data = {k: soln[k][slc, slc] for k in keys}

    with _Dataset(fpath, "a", parallel=True, comm=grid.comm, **kwargs) as dset:
        _add_time_data_to_dataset(
            dset, data, time, tidx, [grid.ibg, grid.ied, grid.jbg, grid.jed])
        dset.sync()
Example #12
def netCDF(fname, data_type=None, error_unknown_type=True, verbose=False):
    """

    Parameters
    ----------
    fname
    data_type: string
        You can overwrite the type of the file. This is handy in case you get an error that the type is unkown

    Returns
    -------

    """

    ni = _Dataset(fname, 'r')

    # for very old files which do not set this attribute yet
    if not data_type:
        try:
            data_type = ni.getncattr('_type')
        except AttributeError:
            try:
                data_type = ni.getncattr('_ts_type')
            except AttributeError:
                txt = ('File has no attribute "_type". You can set the kwarg data_type if you '
                       'know what the type is supposed to be, e.g. data_type = "TimeSeries".')
                if error_unknown_type:
                    raise TypeError(txt)

    if verbose:
        print('Type is %s.' % data_type)

    # test if the type is among the known types
    if data_type in importable_types:
        category = importable_types[data_type]['category']
    else:
        txt = 'Type %s is unknown, programming required.' % data_type
        raise TypeError(txt)

    if category == 'timeseries':
        # load time
        time_var = ni.variables['time']
        ts_time = _num2date(time_var[:], time_var.units)
        index = _pd.DatetimeIndex(ts_time)

    elif category == 'verticalprofile':
        # load altitude
        alt_var = ni.variables['altitude']
        index = alt_var[:]

    # load data and column names
    var_data = ni.variables['data']
    var_data_col = ni.variables['data_columns']
    ts_data = _pd.DataFrame(var_data[:], index=index,
                            columns=var_data_col[:])

    # create the output object via the registered constructor
    ts_out = importable_types[data_type]['call'](ts_data)

    # load attributes and attach to the output object
    for atr in ni.ncattrs():
        value = ni.getncattr(atr)
        # pandas does not like numpy scalar types -> convert to plain Python types
        if isinstance(value, str):
            pass
        # netCDF cannot store NoneType, so None was saved as np.nan; convert back first,
        # before the float branch below would swallow the nan
        elif _np.isnan(value):
            value = None
        elif 'float' in value.dtype.name:
            value = float(value)
        elif 'int' in value.dtype.name:
            value = int(value)

        setattr(ts_out, atr, value)

    ni.close()
    return ts_out
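A usage sketch; the paths are placeholders, and passing data_type assumes 'TimeSeries' is a key registered in importable_types:

obj = netCDF('ts_demo.nc', verbose=True)
# for old files that lack the `_type` attribute (assumed registered type name):
obj = netCDF('old_file.nc', data_type='TimeSeries')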
Example #13
def open_netCDF(fname, data_type=None, error_unknown_type=True, verbose=False):
    """

    Parameters
    ----------
    fname
    data_type: string
        You can overwrite the type of the file. This is handy in case you get an error that the type is unkown

    Returns
    -------

    """

    ni = _Dataset(fname, 'r')

    # for very old files which do not set this attribute yet
    if not data_type:
        try:
            data_type = ni.getncattr('_type')
        except AttributeError:
            try:
                data_type = ni.getncattr('_ts_type')
            except AttributeError:
                txt = ('File has no attribute "_type". You can set the kwarg data_type if you '
                       'know what the type is supposed to be, e.g. data_type = "TimeSeries".')
                if error_unknown_type:
                    raise TypeError(txt)

    if verbose:
        print('Type is %s.' % data_type)

    # test if the type is among the known types
    if data_type in importable_types:
        category = importable_types[data_type]['category']
    else:
        txt = 'Type %s is unknown, programming required.' % data_type
        raise TypeError(txt)

    if category == 'timeseries':
        # load time
        time_var = ni.variables['time']
        ts_time = _num2date(time_var[:], time_var.units)
        index = _pd.DatetimeIndex(ts_time)

    elif category == 'verticalprofile':
        # load altitude
        alt_var = ni.variables['altitude']
        index = alt_var[:]

    # load data and column names
    var_data = ni.variables['data']
    var_data_col = ni.variables['data_columns']
    ts_data = _pd.DataFrame(var_data[:], index=index,
                            columns=var_data_col[:])

    # create the output object via the registered constructor
    ts_out = importable_types[data_type]['call'](ts_data)

    # load attributes and attach to the output object
    for atr in ni.ncattrs():
        value = ni.getncattr(atr)
        # pandas does not like numpy scalar types -> convert to plain Python types
        if isinstance(value, str):
            pass
        # netCDF cannot store NoneType, so None was saved as np.nan; convert back first,
        # before the float branch below would swallow the nan
        elif _np.isnan(value):
            value = None
        elif 'float' in value.dtype.name:
            value = float(value)
        elif 'int' in value.dtype.name:
            value = int(value)

        setattr(ts_out, atr, value)

    ni.close()
    return ts_out
Example #14
def wrf_proj(filename):

    # a throw-away class serves as a simple attribute container
    proj = type('WRF_PROJECTION', (), {})

    nc = _Dataset(filename, 'r')

    proj.code = int(nc.MAP_PROJ)
    proj.nx = len(nc.dimensions['west_east'])
    proj.ny = len(nc.dimensions['south_north'])
    if ('bottom_top' in nc.dimensions):
        proj.nz = len(nc.dimensions['bottom_top'])
    proj.dx = float(nc.DX)
    proj.dy = float(nc.DY)
    proj.cenlat = float(nc.CEN_LAT)
    proj.cenlon = float(nc.CEN_LON)
    proj.stdlat1 = float(nc.TRUELAT1)
    proj.stdlat2 = float(nc.TRUELAT2)
    proj.stdlon = float(nc.STAND_LON)

    if 'XLAT' in nc.variables:
        proj.xlat = _np.squeeze(nc.variables["XLAT"][:])
        tmp = _np.squeeze(nc.variables["XLONG"][:])
        proj.xlon = (tmp < 0.0) * 360.0 + tmp
    elif 'XLAT_M' in nc.variables:
        proj.xlat = _np.squeeze(nc.variables["XLAT_M"][:])
        tmp = _np.squeeze(nc.variables["XLONG_M"][:])
        proj.xlon = (tmp < 0.0) * 360.0 + tmp

    proj.lat1 = proj.xlat[0, 0]
    proj.lon1 = proj.xlon[0, 0]

    nc.close()

    if (proj.code == 1):
        proj.projection = 'lcc'
        proj.lat_0 = proj.cenlat
        proj.lon_0 = proj.cenlon
        proj.lat_1 = proj.stdlat1
        proj.lat_2 = proj.stdlat2
        proj.width = proj.dx * proj.nx
        proj.height = proj.dy * proj.ny

    elif (proj.code == 3):
        proj.projection = 'merc'
        proj.llcrnrlon = _np.min(proj.xlon) - 0.1
        proj.llcrnrlat = _np.min(proj.xlat) - 0.1
        proj.urcrnrlon = _np.max(proj.xlon) + 0.1
        proj.urcrnrlat = _np.max(proj.xlat) + 0.1

    else:
        msg = ('Error message from wrf_proj:\n'
               '   wrf_proj is unable to handle projection code "%d"\n'
               '   valid options are:\n'
               '   1 | 3 for lcc | merc' % proj.code)
        raise Exception(msg)

    proj.resolution = 'i'
    proj.meridians = _np.arange(-180, 180, 30)
    proj.meridians_labels = [0, 0, 0, 1]
    proj.parallels = _np.arange(-90, 90, 15)
    proj.parallels_labels = [0, 1, 0, 0]
    proj.box_lat = _np.concatenate(
        (proj.xlat[0, :], _np.transpose(proj.xlat[:, -1]),
         _np.flipud(proj.xlat[-1, :]), _np.flipud(proj.xlat[:, 0])),
        axis=0)
    proj.box_lon = _np.concatenate(
        (proj.xlon[0, :], _np.transpose(proj.xlon[:, -1]),
         _np.flipud(proj.xlon[-1, :]), _np.flipud(proj.xlon[:, 0])),
        axis=0)

    proj = proj_set(proj)

    return proj
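A usage sketch; the WRF output path is a placeholder:

proj = wrf_proj('wrfout_d01_2020-01-01_00:00:00')
print(proj.projection, proj.nx, proj.ny, proj.dx)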
Example #15
def read_cdf(fname, concat=True, ignore_unknown=False, verbose=True, read_only=None):

    # list or single file
    if isinstance(fname, str):
        fname = [fname]
    products = {}

    # loop through files
    for f in fname:
        if verbose:
            print('\n', f)

        # error handling: test for netCDF file format
        if _os.path.splitext(f)[-1] != '.cdf':
            txt = '\t %s is not a netCDF file ... skipping'%f
            if verbose:
                print(txt)
            continue

        # open file

        # unfortunately ARM data is not very uniform; 'platform_id' is not always
        # set, so the product id has to be guessed from the file name
        fnt = _os.path.split(f)[-1].split('.')[0]
        foundit = False
        for prod in _arm_products.keys():
            if prod in fnt:
                product_id = prod
                foundit = True
                break
        if not foundit:
            txt = '\t guessing the product id from the file name failed ... skipping'
            if verbose:
                print(txt)
            continue

        elif read_only:
            if product_id not in read_only:
                if verbose:
                    print('not in read_only')
                continue

        # defensive check: product_id was taken from _arm_products above
        if product_id not in _arm_products:
            txt = 'Platform id %s is unknown.'%product_id
            if ignore_unknown:
                if verbose:
                    print(txt + '... skipping')
                continue
            else:
                raise KeyError(txt)

        if product_id not in products:
            products[product_id] = []

        file_obj = _Dataset(f)
        out = _arm_products[product_id]['module']._parse_netCDF(file_obj)
        file_obj.close()
        products[product_id].append(out)

    if len(fname) == 1:
        return out  # note: out is undefined if the single file was skipped
    else:
        if concat:
            for pf in products.keys():
                products[pf] = _arm_products[pf]['module']._concat_rules(products[pf])
        return products
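A usage sketch; the file names and the product id are hypothetical and must match keys in `_arm_products`:

out = read_cdf(['sgpaosE13.a1.20200101.000000.cdf',
                'sgpaosE13.a1.20200102.000000.cdf'],
               read_only=['aos'], verbose=False)  # returns a dict keyed by product id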