Example #1
    def __init__(self, bpch_path, tracerinfo=None, diaginfo=None, mode='r',
                 timeslice=slice(None), noscale=False,
                 vertgrid='GEOS-5-REDUCED', nogroup=False, reader=None):
        bpch1kwds = OrderedDict(
            tracerinfo=tracerinfo, diaginfo=diaginfo, mode=mode,
            timeslice=timeslice, noscale=noscale,
            vertgrid=vertgrid, nogroup=nogroup
        )
        bpch2kwds = OrderedDict(
            nogroup=nogroup, noscale=noscale,
            vertgrid=vertgrid
        )
        # With no explicit reader, try bpch1 first and fall back to bpch2
        # quietly; an explicitly requested reader warns before falling back
        quiet = reader is None
        if reader is None or reader == 'bpch1':
            reader1, kwds1 = bpch1, bpch1kwds
            reader2, kwds2 = bpch2, bpch2kwds
        else:
            reader1, kwds1 = bpch2, bpch2kwds
            reader2, kwds2 = bpch1, bpch1kwds

        try:
            reader1.__init__(self, bpch_path, **kwds1)
        except Exception as e:
            if not quiet:
                warn('Reverting to {} : {}'.format(reader2, str(e)))
            reader2.__init__(self, bpch_path, **kwds2)
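The constructor above tries the preferred bpch reader and falls back to the other, warning only when a reader was explicitly requested. The standalone sketch below (hypothetical ReaderA/ReaderB classes, not part of PseudoNetCDF) illustrates the same try-then-fall-back pattern:

from warnings import warn


class ReaderA:
    # Hypothetical reader that only accepts '.a' paths
    def __init__(self, path):
        if not path.endswith('.a'):
            raise ValueError('ReaderA cannot open ' + path)
        self.path = path


class ReaderB:
    # Hypothetical permissive reader used as the fallback
    def __init__(self, path):
        self.path = path


def open_with_fallback(path, reader=None):
    # As in the example, auto mode is quiet; explicit choices warn on fallback
    quiet = reader is None
    if reader in (None, 'a'):
        first, second = ReaderA, ReaderB
    else:
        first, second = ReaderB, ReaderA
    try:
        return first(path)
    except Exception as e:
        if not quiet:
            warn('Reverting to {}: {}'.format(second.__name__, e))
        return second(path)


print(type(open_with_fallback('file.b')).__name__)  # ReaderB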
Example #2
def checkvals(ncf, method, clean, compare):
    # Label the case by whether bounds exist plus the method/clean options
    withbnds = str('coord_bounds' in ncf.variables)
    prefix = withbnds + '&' + method + '&' + clean
    # cvals is expected to be defined at module scope by the test setup
    idx = ncf.val2idx('coord',
                      cvals,
                      method=method,
                      clean=clean,
                      bounds='warn')
    warn(prefix + ' got: ' + repr(idx))
    warn(prefix + ' chk: ' + repr(compare))
    assert np.ma.allclose(compare, idx)
Example #3
    def _updatetime(self, write=True, create=False):
        """Set CDATE/CTIME and/or WDATE/WTIME to now (YYYYJJJ, HHMMSS)"""
        from datetime import datetime
        t = datetime.now()
        try:
            if create:
                self.CDATE = int(t.strftime('%Y%j'))
                self.CTIME = int(t.strftime('%H%M%S'))
            if write:
                self.WDATE = int(t.strftime('%Y%j'))
                self.WTIME = int(t.strftime('%H%M%S'))
        except Exception as e:
            warn('Time could not be updated; ' + str(e))
Example #4
    def updatetflag(self, overwrite=None, startdate=None, tstep=None):
        """
        Create or refresh the IOAPI TFLAG variable and, where missing,
        the SDATE, STIME and TSTEP global attributes
        """
        if overwrite is None:
            overwrite = ('TFLAG' not in self.variables
                         or self.variables['TFLAG'].shape[1] != self.NVARS)

        if overwrite:
            if 'TFLAG' in self.variables:
                del self.variables['TFLAG']
            if startdate is not None:
                self.SDATE = int(startdate.strftime('%Y%j'))
                self.STIME = int(startdate.strftime('%H%M%S'))
            if tstep is not None:
                self.TSTEP = tstep

            times = self.getTimes()
            tvar = self.createVariable('TFLAG', 'i',
                                       ('TSTEP', 'VAR', 'DATE-TIME'))
            tvar.units = '<YYYYDDD,HHMMSS>'.ljust(16)
            tvar.long_name = 'TFLAG'.ljust(16)
            tvar.var_desc = ("Timestep-valid flags:  (1) YYYYDDD or (2) " +
                             "HHMMSS                                ")

            yyyyjjj = np.array([int(t.strftime('%Y%j')) for t in times])
            hhmmss = np.array([int(t.strftime('%H%M%S')) for t in times])

            tvar[:, :, 0] = yyyyjjj[:, None].repeat(tvar.shape[1], 1)
            tvar[:, :, 1] = hhmmss[:, None].repeat(tvar.shape[1], 1)
        else:
            if len(self.dimensions['VAR']) == 0:
                return

            try:
                times = self.getTimes()
            except Exception as e:
                warn('Times were incalculable: using epoch start\n' + str(e))
                times = np.array([datetime.datetime(1970, 1, 1)])

            if not hasattr(self, 'SDATE'):
                self.SDATE = int(times[0].strftime('%Y%j'))
            if not hasattr(self, 'STIME'):
                self.STIME = int(times[0].strftime('%H%M%S'))
            if not hasattr(self, 'TSTEP'):
                if times.size > 1:
                    dt = np.diff(times)
                    if not (dt[0] == dt).all():
                        warn('New time is unstructured')

                    tstep = int((datetime.datetime(1900, 1, 1, 0) +
                                 dt.mean()).strftime('%H%M%S'))
                    self.TSTEP = tstep
                else:
                    self.TSTEP = 10000
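To make the TFLAG layout concrete, here is a minimal standalone sketch that builds the (TSTEP, VAR, DATE-TIME) integer array from a short list of datetimes with plain numpy; the variable count is an illustrative value, not taken from any file:

import datetime
import numpy as np

# Three hourly times and an illustrative variable count
times = [datetime.datetime(2024, 1, 1, h) for h in range(3)]
nvars = 2

yyyyjjj = np.array([int(t.strftime('%Y%j')) for t in times])
hhmmss = np.array([int(t.strftime('%H%M%S')) for t in times])

# TFLAG[t, v, 0] holds YYYYDDD and TFLAG[t, v, 1] holds HHMMSS
tflag = np.zeros((len(times), nvars, 2), dtype='i')
tflag[:, :, 0] = yyyyjjj[:, None].repeat(nvars, 1)
tflag[:, :, 1] = hhmmss[:, None].repeat(nvars, 1)
print(tflag[:, 0, :])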
Example #5
def get_ioapi_sphere():
    import os
    ENV_IOAPI_ISPH = os.environ.get('IOAPI_ISPH', None)
    if ENV_IOAPI_ISPH is None:
        ENV_IOAPI_ISPH = '6370000.'
        warn('IOAPI_ISPH is assumed to be ' + ENV_IOAPI_ISPH +
             '; consistent with WRF')
    def _asnum(part):
        # Parse without eval: sphere codes are ints; radii are floats
        try:
            return int(part)
        except ValueError:
            return float(part)

    isph_parts = [_asnum(ip) for ip in ENV_IOAPI_ISPH.split()]
    if len(isph_parts) > 2:
        raise ValueError('IOAPI_ISPH must be 1 or 2 parameters (got: %s)' %
                         str(isph_parts))
    elif len(isph_parts) == 2:
        return isph_parts
    elif 0 <= isph_parts[0] < _AXIS.size:
        return _AXIS[isph_parts[0]], _BXIS[isph_parts[0]]
    else:
        # A single radius is used for both the semi-major and semi-minor axes
        return isph_parts * 2
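As a usage sketch, the environment variable can be parsed into one or two sphere parameters; the snippet below is a simplified stand-in (radii only, no sphere-code lookup table) rather than a call into the library:

import os

# Simplified stand-in for IOAPI_ISPH handling (radii only, no code lookup)
os.environ.setdefault('IOAPI_ISPH', '6370000. 6370000.')
parts = [float(p) for p in os.environ['IOAPI_ISPH'].split()]
if len(parts) == 2:
    semi_major, semi_minor = parts
else:
    semi_major = semi_minor = parts[0]
print(semi_major, semi_minor)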
Example #6
    def updatetflag(self, overwrite=None, startdate=None):
        if overwrite is None:
            overwrite = ('TFLAG' not in self.variables
                         or self.variables['TFLAG'].shape[1] != self.NVARS)

        if overwrite:
            if 'TFLAG' in self.variables:
                del self.variables['TFLAG']
            if startdate is not None:
                self.SDATE = int(startdate.strftime('%Y%j'))
                self.STIME = int(startdate.strftime('%H%M%S'))
            times = self.getTimes()
            tvar = self.createVariable('TFLAG', 'i',
                                       ('TSTEP', 'VAR', 'DATE-TIME'))
            tvar.units = '<YYYYDDD,HHMMSS>'.ljust(16)
            tvar.long_name = 'TFLAG'.ljust(16)
            tvar.var_desc = ("Timestep-valid flags:  (1) YYYYDDD or (2) " +
                             "HHMMSS                                ")

            yyyyjjj = np.array([int(t.strftime('%Y%j')) for t in times])
            hhmmss = np.array([int(t.strftime('%H%M%S')) for t in times])

            tvar[:, :, 0] = yyyyjjj[:, None].repeat(tvar.shape[1], 1)
            tvar[:, :, 1] = hhmmss[:, None].repeat(tvar.shape[1], 1)
        else:
            times = self.getTimes()

            if not hasattr(self, 'SDATE'):
                self.SDATE = int(times[0].strftime('%Y%j'))
            if not hasattr(self, 'STIME'):
                self.STIME = int(times[0].strftime('%H%M%S'))
            # Derive TSTEP from the spacing when more than one time exists
            if times.size > 1:
                dt = np.diff(times)
                if not (dt[0] == dt).all():
                    warn('New time is unstructured')
                self.TSTEP = int(
                    (datetime.datetime(1900, 1, 1, 0) +
                     dt[0]).strftime('%H%M%S'))
            elif not hasattr(self, 'TSTEP'):
                # With a single time, default to a 1-hour step (HHMMSS)
                self.TSTEP = 10000
Example #7
def add_lcc_coordinates(ifileo):
    mapdef = getmapdef(ifileo)
    gridname = mapdef.grid_mapping_name
    if 'PERIM' in ifileo.dimensions.keys():
        xdim = 'PERIM'
        ydim = 'PERIM'
        _x = np.arange(-ifileo.XCELL, (ifileo.NCOLS + 1) * ifileo.XCELL,
                       ifileo.XCELL) + ifileo.XORIG + ifileo.XCELL / 2.
        _y = np.arange(-ifileo.YCELL, (ifileo.NROWS + 1) * ifileo.YCELL,
                       ifileo.YCELL) + ifileo.YORIG + ifileo.YCELL / 2.
        bx = _x[1:]
        by = _y[0].repeat(ifileo.NCOLS + 1)
        ex = _x[-1].repeat(ifileo.NROWS + 1)
        ey = _y[1:]
        tx = _x[0:-1]
        ty = _y[-1].repeat(ifileo.NCOLS + 1)
        wx = _x[0].repeat(ifileo.NROWS + 1)
        wy = _y[:-1]
        lcc_x = x = np.concatenate([bx, ex, tx, wx])
        lcc_y = y = np.concatenate([by, ey, ty, wy])
        XCELL = ifileo.XCELL
        YCELL = ifileo.YCELL
        lcc_xe = np.array(
            [x - XCELL / 2., x + XCELL / 2., x + XCELL / 2., x - XCELL / 2.]).T
        lcc_ye = np.array(
            [y - YCELL / 2., y - YCELL / 2., y + YCELL / 2., y + YCELL / 2.]).T
        latlon_dim = ('PERIM', )
        latlone_dim = ('PERIM', 'nv')
        latlon_coord = 'PERIM'
    else:
        xdim = 'COL'
        ydim = 'ROW'
        latlon_dim = (ydim, xdim)
        latlon_coord = 'latitude longitude'
        latlone_dim = (ydim, xdim, 'nv')
        xe = np.arange(0, ifileo.NCOLS) * ifileo.XCELL + ifileo.XORIG
        ye = np.arange(0, ifileo.NROWS) * ifileo.YCELL + ifileo.YORIG
        lcc_xe, lcc_ye = np.meshgrid(xe, ye)
        lcc_xe = np.concatenate([
            lcc_xe[:, :, None], lcc_xe[:, :, None] + ifileo.XCELL,
            lcc_xe[:, :, None] + ifileo.XCELL, lcc_xe[:, :, None]
        ],
                                axis=2)
        lcc_ye = np.concatenate([
            lcc_ye[:, :, None], lcc_ye[:, :, None], lcc_ye[:, :, None] +
            ifileo.YCELL, lcc_ye[:, :, None] + ifileo.YCELL
        ],
                                axis=2)
        x = np.arange(0, ifileo.NCOLS) * ifileo.XCELL + \
            ifileo.XCELL / 2. + ifileo.XORIG
        y = np.arange(0, ifileo.NROWS) * ifileo.YCELL + \
            ifileo.YCELL / 2. + ifileo.YORIG
        lcc_x, lcc_y = np.meshgrid(x, y)

    if _withlatlon:
        if ifileo.GDTYP == 2:
            mapstrs = [
                '+proj=lcc',
                '+a=%s' % mapdef.semi_major_axis,
                '+b=%s' % mapdef.semi_minor_axis,
                '+lon_0=%s' % mapdef.longitude_of_central_meridian,
                '+lat_1=%s' % mapdef.standard_parallel[0],
                '+lat_2=%s' % mapdef.standard_parallel[1],
                '+lat_0=%s' % mapdef.latitude_of_projection_origin
            ]
            mapstr = ' '.join(mapstrs)
            mapproj = pyproj.Proj(mapstr)
        elif ifileo.GDTYP == 6:
            mapstr = ('+proj=stere +a={3} +b={4} ' +
                      '+lon_0={0} +lat_0={1} +lat_ts={2}').format(
                          mapdef.straight_vertical_longitude_from_pole,
                          mapdef.latitude_of_projection_origin,
                          mapdef.standard_parallel[0], mapdef.semi_major_axis,
                          mapdef.semi_minor_axis)
            mapproj = pyproj.Proj(mapstr)
        elif ifileo.GDTYP == 7:
            mapstr = '+proj=merc +a=%s +b=%s +lat_ts=0 +lon_0=%s' % (
                mapdef.semi_major_axis, mapdef.semi_minor_axis,
                mapdef.longitude_of_central_meridian)
            mapproj = pyproj.Proj(mapstr)
        elif ifileo.GDTYP == 1:

            def mapproj(x, y, inverse):
                # Lat-lon grid: coordinates are already in degrees
                return (x, y)

        else:
            raise ValueError('Unsupported GDTYP: {}'.format(ifileo.GDTYP))

        lon, lat = mapproj(lcc_x, lcc_y, inverse=True)
        lone, late = mapproj(lcc_xe.ravel(), lcc_ye.ravel(), inverse=True)
        lone = lone.reshape(*lcc_xe.shape)
        late = late.reshape(*lcc_ye.shape)

    if 'x' not in ifileo.variables.keys() and xdim in ifileo.dimensions:
        """
        Not necessary for cdo
        """
        var = ifileo.createVariable('x', x.dtype.char, (xdim, ))
        var[:] = x[:]
        var.units = 'km'
        var._CoordinateAxisType = "GeoX"
        var.long_name = ("synthesized coordinate from XORIG XCELL " +
                         "global attributes")

    if 'y' not in ifileo.variables.keys() and ydim in ifileo.dimensions:
        """
        Not necessary for cdo
        """
        var = ifileo.createVariable('y', x.dtype.char, (ydim, ))
        var[:] = y[:]
        var.units = 'km'
        var._CoordinateAxisType = "GeoY"
        var.long_name = ("synthesized coordinate from YORIG YCELL " +
                         "global attributes")

    if _withlatlon and 'latitude' not in ifileo.variables.keys():
        var = ifileo.createVariable('latitude', lat.dtype.char, latlon_dim)
        var[:] = lat
        var.units = 'degrees_north'
        var.standard_name = 'latitude'
        var.bounds = 'latitude_bounds'
        var.coordinates = latlon_coord

    if _withlatlon and 'longitude' not in ifileo.variables.keys():
        var = ifileo.createVariable('longitude', lon.dtype.char, latlon_dim)
        var[:] = lon
        var.units = 'degrees_east'
        var.standard_name = 'longitude'
        var.bounds = 'longitude_bounds'
        var.coordinates = latlon_coord

    if _withlatlon:
        for dk, dl in zip(latlone_dim, late.shape):
            if dk not in ifileo.dimensions:
                ifileo.createDimension(dk, dl)

    if _withlatlon and 'latitude_bounds' not in ifileo.variables.keys():
        var = ifileo.createVariable('latitude_bounds', lat.dtype.char,
                                    latlone_dim)
        var[:] = late
        var.units = 'degrees_north'
        var.standard_name = 'latitude_bounds'

    if _withlatlon and 'longitude_bounds' not in ifileo.variables.keys():
        var = ifileo.createVariable('longitude_bounds', lon.dtype.char,
                                    latlone_dim)
        var[:] = lone
        var.units = 'degrees_east'
        var.standard_name = 'longitude_bounds'

    for varkey in ifileo.variables.keys():
        var = ifileo.variables[varkey]
        # this must have been a fix for dictionaries that
        # reproduced variables on demand
        # we should find a better fix for this
        # try:
        #    ifileo.variables[varkey] = var
        # except Exception:
        #    pass
        olddims = list(var.dimensions)
        if _withlatlon:

            def io2cf(x):
                return {
                    'ROW': 'latitude',
                    'COL': 'longitude',
                    'TSTEP': 'time',
                    'LAY': 'level'
                }.get(x, x)

            dims = [io2cf(d) for d in olddims]
        else:
            dims = list(olddims)
        if olddims != dims:
            if (varkey not in ('latitude', 'longitude')
                    and ('PERIM' in dims or
                         ('latitude' in dims and 'longitude' in dims))):
                try:
                    var.coordinates = ' '.join(dims)
                    var.grid_mapping = gridname
                except Exception as e:
                    warn(('coordinates="{0}" and grid_mapping="{1}" not ' +
                          'added to variable "{2}":\n\t{3}').format(
                              ' '.join(dims), gridname, varkey, e),
                         category=UserWarning)
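The grid-to-longitude/latitude conversion above builds a proj4-style string and uses pyproj's inverse transform. A minimal self-contained sketch of that step, with made-up Lambert conformal conic parameters rather than values read from a file, could look like:

import numpy as np
import pyproj

# Hypothetical Lambert conformal conic parameters, for illustration only
mapstr = ('+proj=lcc +a=6370000 +b=6370000 +lon_0=-97 '
          '+lat_1=33 +lat_2=45 +lat_0=40')
mapproj = pyproj.Proj(mapstr)

# Cell-center coordinates in projection metres (toy 2x2 grid)
x, y = np.meshgrid([-12000., 12000.], [-12000., 12000.])
lon, lat = mapproj(x, y, inverse=True)
print(np.round(lon, 3), np.round(lat, 3))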
Example #8
from PseudoNetCDF.pncwarn import warn
import numpy as np

_withlatlon = False
try:
    import pyproj
    _withlatlon = True
except Exception:
    warn('pyproj could not be found, so IO/API coordinates cannot be ' +
         'converted to lat/lon; to fix, install pyproj or basemap ' +
         '(e.g., `pip install pyproj`)')


def add_lay_coordinates(ifileo):
    if 'LAY' in ifileo.dimensions:
        nlay = len(ifileo.dimensions['LAY'])
    elif hasattr(ifileo, 'NLAYS'):
        nlay = ifileo.NLAYS
    elif hasattr(ifileo, 'VGLVLS'):
        nlay = len(ifileo.VGLVLS) - 1
    else:
        return
    if 'layer' not in ifileo.variables.keys():
        var = ifileo.createVariable('layer', 'd', ('LAY', ))
        var[:] = np.arange(nlay, dtype='d')
        var.units = 'model layers'
        var.standard_name = 'layer'
    if 'level' not in ifileo.variables.keys():
        var = ifileo.createVariable('level', 'd', ('LAY', ))
        if hasattr(ifileo, 'VGLVLS'):
            var[:] = (ifileo.VGLVLS[:-1] + ifileo.VGLVLS[1:]) / 2
Example #9
    def ll2ij(self, lon, lat, bounds='error', clean='none'):
        """
        Converts lon/lat to 0-based indices (0, M), (0, N)

        Parameters
        ----------
        lon : scalar or iterable of longitudes in decimal degrees
        lat : scalar or iterable of latitudes in decimal degrees
        bounds : ignore, error, or warn if i, j are out of domain
        clean : none, mask, or clip treatment of out-of-domain points

        Returns
        -------
        i, j : indices (0-based) for variables
        """
        lon = np.asarray(lon)
        lat = np.asarray(lat)

        lonc = self.variables['lon']
        latc = self.variables['lat']

        dlon = np.median(np.diff(lonc)) / 2.
        dlat = np.median(np.diff(latc)) / 2.

        lonb = np.array([lonc - dlon, lonc + dlon]).T
        latb = np.array([latc - dlat, latc + dlat]).T

        # Narrow polar cells whose bounds would extend past the poles
        spi = latb[:, 0] < -90
        npi = latb[:, 1] > 90

        latb[spi, 0] = latc[spi] - dlat / 2.
        latb[spi, 1] = latc[spi] + dlat / 2.
        latb[npi, 0] = latc[npi] - dlat / 2.
        latb[npi, 1] = latc[npi] + dlat / 2.

        easter = lon[:, None] >= lonb[:, 0]
        wester = lon[:, None] < lonb[:, 1]
        loni, gi = np.where(easter & wester)
        i = np.ma.masked_all(lon.shape, dtype='i')
        i[loni] = gi
        norther = lat[:, None] >= latb[:, 0]
        souther = lat[:, None] < latb[:, 1]
        latj, gj = np.where(norther & souther)
        j = np.ma.masked_all(lat.shape, dtype='i')
        j[latj] = gj

        nx = lonb.shape[0]
        ny = latb.shape[0]
        # Indices (into lon/lat) of points that missed the grid on each side
        missi, = np.where(np.ma.getmaskarray(i))
        missj, = np.where(np.ma.getmaskarray(j))
        lowi = missi[~easter[missi].any(1)]
        highi = missi[~wester[missi].any(1)]
        lowj = missj[~norther[missj].any(1)]
        highj = missj[~souther[missj].any(1)]
        if bounds != 'ignore':
            outb = np.zeros(lon.shape, dtype='bool')
            outb[lowi] = True
            outb[highi] = True
            outb[lowj] = True
            outb[highj] = True
            nout = outb.sum()
            if nout > 0:
                message = '{} Points out of bounds; {}'.format(
                    nout, np.where(outb))
                if bounds == 'error':
                    raise ValueError(message)
                else:
                    warn(message)

        if clean == 'clip':
            i[lowi] = 0
            i[highi] = nx - 1
            j[lowj] = 0
            j[highj] = ny - 1
        else:
            # Mask or nothing should both create symmetric masks
            mask = (np.ma.getmaskarray(i) | np.ma.getmaskarray(j))
            i = np.ma.masked_where(mask, i)
            j = np.ma.masked_where(mask, j)

        return i, j
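The lookup above brackets each query value between per-cell lower and upper bounds. The toy sketch below demonstrates the same bracketing idea on a 1-D coordinate, independent of the class and its masking/clipping options:

import numpy as np

# Toy cell centers and derived bounds (60 degree cells)
centers = np.array([-150., -90., -30., 30., 90., 150.])
half = np.median(np.diff(centers)) / 2.
bounds = np.array([centers - half, centers + half]).T

query = np.array([-100., 45., 170.])
inlow = query[:, None] >= bounds[:, 0]
inhigh = query[:, None] < bounds[:, 1]
qi, ci = np.where(inlow & inhigh)

# Masked result so out-of-domain queries stay masked
idx = np.ma.masked_all(query.shape, dtype='i')
idx[qi] = ci
print(idx)  # [1 3 5] for this toy grid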
Example #10
def _tryset(obj, pk, pv, prefix='obj'):
    try:
        setattr(obj, pk, pv)
    except Exception as e:
        warn('{}.{} not set {}; value {} was lost'.format(prefix, pk, e, pv))
Example #11
    def sliceDimensions(self, *args, **kwds):
        """
        Wrapper PseudoNetCDFFile.sliceDimensions that corrects ROW, COL,
        LAY and TIME meta-data according to the ioapi format

        Parameters
        ----------
        see PseudoNetCDFFile.sliceDimensions
        """
        # First slice as normal
        outf = PseudoNetCDFFile.sliceDimensions(self, *args, **kwds)
        # Copy slice keywords excluding newdims
        dimslices = kwds.copy()
        dimslices.pop('newdims', None)

        # Identify array indices and the need for fancy indexing
        isarray = {
            dk: not np.isscalar(dv) and not isinstance(dv, slice)
            for dk, dv in dimslices.items()
        }
        # anyisarray = np.sum(list(isarray.values())) > 1

        # Check if COL or ROW was used
        hascol = 'COL' in dimslices
        hasrow = 'ROW' in dimslices
        deleterowcol = False
        if hascol and hasrow:
            if isarray['ROW'] and isarray['COL']:
                newdims = kwds.get('newdims', ('POINTS', ))
                if 'ROW' not in newdims and 'COL' not in newdims:
                    deleterowcol = True

        # If lay was subset, subset VGLVLS too
        if 'LAY' in kwds:
            nlvls = outf.VGLVLS.size
            lidx = np.array(np.arange(outf.VGLVLS.size - 1)[kwds['LAY']],
                            ndmin=1)
            tmpvglvls = outf.VGLVLS[lidx]
            if lidx[-1] < (nlvls - 1):
                try:
                    endl = outf.VGLVLS[lidx[-1] + 1]
                    tmpvglvls = np.append(tmpvglvls, endl)
                    outf.VGLVLS = tmpvglvls
                except Exception:
                    warn('VGLVLS could not be diagnosed; update manually')
        # If subsetting replaces ('ROW', 'COL') ... for example with ('PERIM',)
        # remove the dimensions
        if deleterowcol:
            del outf.dimensions['COL']
            del outf.dimensions['ROW']
        else:
            # Update origins
            if 'COL' in kwds and 'COL' in outf.dimensions:
                ncol = len(self.dimensions['COL'])
                outf.XORIG += np.arange(ncol)[kwds['COL']].take(0) * outf.XCELL
            if 'ROW' in kwds and 'ROW' in outf.dimensions:
                nrow = len(self.dimensions['ROW'])
                outf.YORIG += np.arange(nrow)[kwds['ROW']].take(0) * outf.YCELL

        # Update TFLAG, SDATE, STIME and TSTEP
        if 'TSTEP' in kwds:
            import datetime
            times = np.atleast_1d(self.getTimes()[kwds['TSTEP']])
            outf.SDATE = int(times[0].strftime('%Y%j'))
            outf.STIME = int(times[0].strftime('%H%M%S'))
            if times.size > 1:
                dt = np.diff(times)
                if not (dt[0] == dt).all():
                    warn('New time is unstructured')
                outf.TSTEP = int((datetime.datetime(1900, 1, 1, 0) +
                                  dt[0]).strftime('%H%M%S'))

        outf.updatemeta()
        return outf
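When COL or ROW is sliced, the grid origin moves by the offset of the first retained cell times the cell size. A small arithmetic sketch of that bookkeeping, using made-up IOAPI-like grid values:

import numpy as np

# Made-up IOAPI-like grid metadata and a COL slice
xorig, xcell, ncol = -2736000.0, 12000.0, 459
colslice = slice(100, 200)

# The new origin shifts by the first retained column index times XCELL
new_xorig = xorig + np.arange(ncol)[colslice].take(0) * xcell
print(new_xorig)  # -2736000.0 + 100 * 12000.0 = -1536000.0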
Example #12
def cdtoms(path, outfile=None):
    if outfile is None:
        outfile = PseudoNetCDFFile()
    with open(path, 'r') as infile:
        inlines = infile.readlines()
    dayline = inlines[0]
    daygrp = _groupdict(dayre, dayline)

    sdate = 0
    if 'daystring' in daygrp:
        date = datetime.strptime(daygrp['daystring'], '%b %d, %Y')
        rdate = datetime(1970, 1, 1)
        sdate = (date - rdate).total_seconds()
    else:
        import pandas as pd
        dayparts = dayline.split(' ')
        for i in [3, 2, 1]:
            daystr = ' '.join(dayparts[:i])
            try:
                # pandas removed the box keyword; convert explicitly instead
                date = pd.to_datetime(daystr).to_datetime64()
                break
            except Exception as e:
                print(e)
        else:
            date = np.datetime64('1970-01-01')
        rdate = np.datetime64('1970-01-01')
        sdate = (date - rdate).astype('d') / 1e9

    longrp = _groupdict(edgere, inlines[1])
    latgrp = _groupdict(edgere, inlines[2])

    for propdict in [daygrp, longrp, latgrp]:
        for k, v in propdict.items():
            try:
                v = eval(v)
            except Exception:
                pass
            setattr(outfile, k, v)
    outfile.HISTORY = ''.join(inlines[:3])

    blat = latgrp.get('start', '59.5')
    bsn = latgrp.get('startdir', 'S')
    elat = latgrp.get('end', '59.5')
    esn = latgrp.get('enddir', 'N')
    latstep = float(latgrp.get('step', '1'))
    blat = {'N': 1, 'S': -1}[bsn] * float(blat)
    elat = {'N': 1, 'S': -1}[esn] * float(elat)

    blon = longrp.get('start', '179.375')
    bwe = longrp.get('startdir', 'W')
    elon = longrp.get('end', '179.375')
    ewe = longrp.get('enddir', 'E')
    lonstep = float(longrp.get('step', '1.25'))
    blon = {'E': 1, 'W': -1}[bwe] * float(blon)
    elon = {'E': 1, 'W': -1}[ewe] * float(elon)

    datalines = inlines[3:]
    lats = []
    for i, line in enumerate(datalines):
        if 'lat' not in line:
            datalines[i] = line[1:-1].rstrip()
        else:
            data, lat = line.split('lat =')
            datalines[i] = data[1:-1].rstrip()
            lats.append(lat.strip())

    nlats = len(lats)
    datablock = ''.join(datalines).replace(' ', '0')
    nlons = len(datablock) // 3 // nlats
    outfile.createDimension('time', 1)
    outfile.createDimension('latitude', nlats)
    outfile.createDimension('longitude', nlons)
    outfile.createDimension('nv', 2)

    var = outfile.createVariable('time', 'f', ('time', ))
    var.units = 'seconds since 1970-01-01 00:00:00+0000'
    var[:] = sdate

    var = outfile.createVariable('latitude', 'f', ('latitude', ))
    var.units = 'degrees N'
    var[:] = np.arange(blat, elat + latstep, latstep)
    lat = var
    linelat = np.array(lats, dtype='f')
    if not (lat[:] == linelat).all():
        warn('Header metadata does not match lats')
        lat[:] = linelat

    var = outfile.createVariable('latitude_bounds', 'f', ('latitude', 'nv'))
    var.units = 'degrees N'
    var[:, 0] = lat - latstep / 2.
    var[:, 1] = lat + latstep / 2.

    var = outfile.createVariable('longitude', 'f', ('longitude', ))
    var.units = 'degrees E'
    lon = var[:] = np.arange(blon, elon + lonstep, lonstep)

    var = outfile.createVariable('longitude_bounds', 'f', ('longitude', 'nv'))
    var.units = 'degrees E'
    var[:, 0] = lon - lonstep / 2.
    var[:, 1] = lon + lonstep / 2.

    var = outfile.createVariable('ozone',
                                 'f', ('latitude', 'longitude'),
                                 missing_value=999)
    var.units = 'matm-cm'
    var.long_name = var.var_desc = 'ozone'.ljust(16)
    var[:] = np.ma.masked_values(
        np.array([i for i in datablock], dtype='S1').view('S3').astype('i'),
        var.missing_value).reshape(nlats, nlons)

    return outfile
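The ozone block above is decoded by viewing a run of 3-character fixed-width fields as integers and masking the missing value. A tiny standalone sketch of that trick on a made-up data block:

import numpy as np

# Made-up block of 3-digit fields; blanks become zeros, 999 marks missing
datablock = '  5 12345999'.replace(' ', '0')
vals = np.array([c for c in datablock], dtype='S1').view('S3').astype('i')
ozone = np.ma.masked_values(vals, 999)
print(ozone)  # [5 12 345 --]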
Example #13
    def __init__(self, path, debug=False,
                 na_values=[r'\s+', '*', '99999']):
        """
        path - path or file-like object.
        """
        try:
            import pandas as pd
        except Exception:
            raise ImportError('woudcsonde requires pandas; ' +
                              'install pandas (e.g., pip install pandas)')
        if hasattr(path, 'readline'):
            myf = path
        else:
            myf = open(path, 'r')

        meta = ''
        li = 0
        self.createDimension('site', 1)
        self.createDimension('flight', 1)
        self.createDimension('STRLEN', 64)
        for l in myf:
            key = l.strip().upper()
            if key == '#PROFILE':
                break
            elif key.startswith('#'):
                if debug:
                    print(key)
                lines, newblock = self._addmetavars(myf, key[1:] + '_')
                li += lines
                if newblock == 'PROFILE_':
                    li -= 1
                    break
            else:
                meta += l
                li += 1

        # after #PROFILE loop is broken
        myf.close()
        readopts = dict(sep=r'\s*,',
                        skiprows=li + 1,
                        engine='python',
                        na_values=na_values,
                        comment='*')
        data = pd.read_csv(path, **readopts)
        # indkey = data.columns[0]
        indkey = 'level'
        self.createDimension(indkey, len(data))
        self.metadata = meta
        for key in data.columns:
            try:
                values = data[key]
                var = self.createVariable(key, 'f', (
                    'flight',
                    indkey,
                ))
                var.units = _units.get(key, 'unknown')
                var.long_name = key
                var[:] = values
            except Exception as e:
                warn(str(e) + '; ' + key + ' will not be written')

        datebytes = self.variables['TIMESTAMP_Date'].view('S64')[0, 0]
        datestr = datebytes.decode().strip()
        date = '-'.join(['%02d' % int(v) for v in datestr.split('-')])
        timebytes = self.variables['TIMESTAMP_Time'].view('S64')[0, 0]
        timestr = timebytes.decode().strip()
        time = (timestr + ':00')[:8]
        if time == ':00':
            time = '00:00:00'
        z = self.variables['TIMESTAMP_UTCOffset'].view('S64')[
            0, 0].decode().strip()
        if z.startswith('+'):
            z = z[:3] + '00'
        elif z.startswith('-'):
            z = z[:3] + '00'
        else:
            z = '+' + z[:2] + '00'
        datestr = '{} {}{}'.format(date, time, z)
        rdatestr = '1970-01-01 00:00:00+0000'
        rdate = datetime.strptime(rdatestr, '%Y-%m-%d %H:%M:%S%z')
        outdate = datetime.strptime(datestr, '%Y-%m-%d %H:%M:%S%z')
        dt = (outdate - rdate).total_seconds()
        tvar = self.createVariable('time', 'd', ('flight', ))
        tvar.long_name = 'time'
        tvar.units = 'seconds since ' + rdatestr
        tvar[...] = dt
Example #14
    def _addmetavars(self, myf, key=''):
        """Adds metavariables for next two lines"""
        try:
            import pandas as pd
        except Exception:
            raise ImportError('woudcsonde requires pandas; ' +
                              'install pandas (e.g., pip install pandas)')
        metalines = myf.readline()
        nmetalines = 2
        newblock = ''
        while True:
            nextline = myf.readline()
            nstrip = nextline.strip()
            nmetalines += 1
            if nstrip == '':
                break
            elif nstrip.startswith('*'):
                break
            elif nstrip.startswith('#PROFILE'):
                newblock = nstrip[1:] + '_'
                break
            elif nstrip.startswith('#'):
                nmetalines -= 1
                newblock = nstrip.upper()[1:] + '_'
                nlines, newblock = self._addmetavars(myf, key=newblock)
                nmetalines += nlines
                break
            else:
                metalines += nextline

        if key == 'TIMESTAMP_':
            dtype = 'S64'
        else:
            dtype = None
        try:
            locdata = pd.read_csv(io.BytesIO((metalines).encode()),
                                  dtype=dtype)
        except Exception as e:
            warn(key + ': ' + str(e))
            return nmetalines, newblock
        if key in ('PLATFORM_', 'LOCATION_'):
            dim1 = 'site'
        else:
            dim1 = 'flight'
        for colkey in locdata.columns:  # dtype.names:
            val = locdata[colkey]
            varkey = key + colkey
            dt = val.dtype.char
            odt = {'O': 'c'}.get(dt, dt)
            dim = {
                'c': (dim1, 'STRLEN'),
                'S': (dim1, 'STRLEN')
            }.get(odt, (dim1, ))
            if val.size > 1:
                if not key[:-1] in self.dimensions:
                    self.createDimension(key[:-1], val.size)
                dim = dim + (key[:-1], )
            var = self.createVariable(varkey, odt, dim)
            var.units = _units.get(colkey, 'unknown')
            var.long_name = colkey
            if odt in ('c', 'S'):
                var[:] = np.array([val]).astype('S64').view('S1')
            else:
                var[:] = val
        return nmetalines, newblock