Example No. 1
def plotts(args):
    ifiles = args.ifiles
    times = [gettimes(ifile) for ifile in ifiles]
    fig = plt.figure()
    ax = fig.add_subplot(111)  # add_axes([.1, .15, .8, .8])
    ax.set_xlabel('Time (UTC)')
    for target in args.variables:
        vars = [ifile.variables[target] for ifile in ifiles]
        unit = getattr(vars[0], 'units', 'unknown')
        ax.set_ylabel(target + ' (' + unit + ')')
        del ax.lines[:]
        for vi, (time, var) in enumerate(zip(times, vars)):
            vals = var[:]
            if args.squeeze:
                vals = vals.squeeze()
            vardesc = getattr(var, 'description', None)
            varb = ax.plot(time, vals[:], label=vardesc)
        # plt.setp(ax.xaxis.get_ticklabels(),rotation = 45)
        plt.legend()
        figpath = args.outpath + target + '.' + args.figformat
        for pc in args.plotcommands:
            exec(pc, globals(), locals())
        fig.savefig(figpath)
        if args.verbose > 0:
            print('Saved fig', figpath)
        print(figpath)
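
A minimal way to drive plotts outside its command-line wrapper is to build an argparse.Namespace carrying the attributes the function reads (ifiles, variables, squeeze, plotcommands, outpath, figformat, verbose). This is only a sketch: it assumes the module-level imports plotts relies on (matplotlib.pyplot as plt, numpy as np, gettimes) are in place, that PseudoNetCDF's pncopen is used to open files, and that the paths and variable name are hypothetical.

from argparse import Namespace
from PseudoNetCDF import pncopen  # assumed reader entry point

# hypothetical inputs; any NetCDF-like files with a time coordinate should work
ifiles = [pncopen(p, format='netcdf') for p in ('obs.nc', 'model.nc')]
args = Namespace(ifiles=ifiles, variables=['O3'], squeeze=True,
                 plotcommands=[], outpath='timeseries_',
                 figformat='png', verbose=1)
plotts(args)  # writes timeseries_O3.png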
Example No. 2
def make1d(ifiles, args):
    # fig and ax must exist before the user-supplied keywords can be applied
    fig = plt.figure()
    ax = fig.add_subplot(111)
    if len(args.figure_keywords) > 0:
        plt.setp(fig, **args.figure_keywords)
    if len(args.axes_keywords) > 0:
        plt.setp(ax, **args.axes_keywords)
    for fi, ifile in enumerate(ifiles):
        variables = args.variables
        if variables is None:
            variables = [
                key for key, var in ifile.variables.items() if var.ndim == 2
            ]
        if len(variables) == 0:
            raise ValueError(
                'Unable to heuristically determine plottable variables; '
                'use -v to specify variables for plotting'
            )
        for varkey in variables:
            if not args.overlay:
                ax.cla()
            var = ifile.variables[varkey]
            vals = var[:]
            if args.squeeze:
                vals = vals.squeeze()
            label = getattr(var, 'standard_name', varkey).strip()
            varunit = getattr(var, 'units', 'unknown').strip()
            print(varkey)
            dimkey = var.dimensions[0]
            ax.set_xlabel(dimkey)
            if args.time:
                x = gettimes(ifile)
            elif dimkey in ifile.variables:
                x = ifile.variables[var.dimensions[0]][:]
            else:
                x = np.arange(var.shape[0])

            patches = ax.plot(x, vals, label=label)
            ax.set_ylabel(varunit)
            plt.legend()
            fmt = 'png'
            outpath = args.outpath
            if len(ifiles) > 1:
                lstr = str(fi).rjust(len(str(len(ifiles))), '0')
            else:
                lstr = ''

            figpath = outpath + varkey + lstr + '.' + fmt
            if args.interactive:
                csl = PNCConsole(locals=globals())
                csl.interact()

            fig.savefig(figpath)
            if args.verbose > 0:
                print('Saved fig', figpath)
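
The two plt.setp calls at the top of make1d mean args.figure_keywords and args.axes_keywords are plain matplotlib property dictionaries: setp applies each key name as fig.set_name(value) or ax.set_name(value). A hypothetical pair of dictionaries, for illustration only:

# applied as fig.set_figwidth(10) and fig.set_figheight(4)
figure_keywords = {'figwidth': 10, 'figheight': 4}
# applied as ax.set_yscale('log') and ax.set_title('example')
axes_keywords = {'yscale': 'log', 'title': 'example'}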
Example No. 3
def plot_diurnal_box(ifiles, args):
    times = [gettimes(ifile) for ifile in ifiles]
    hours = [np.array([t.hour for t in time]) for time in times]
    fig = plt.figure()
    sax = fig.add_subplot(111)
    if len(args.figure_keywords) > 0:
        plt.setp(fig, **args.figure_keywords)
    if len(args.axes_keywords) > 0:
        plt.setp(sax, **args.axes_keywords)
    sax.set_xlabel('Time (UTC)')
    for target in args.variables:
        vars = [ifile.variables[target] for ifile in ifiles]
        unit = getattr(vars[0], 'units', 'unknown')
        sax.set_ylabel(target + ' (' + unit + ')')
        vals = [var[:] for var in vars]
        hvars = [[np.ma.compressed(val[hour == i]) for i in range(24)]
                 for hour, val in zip(hours, vals)]
        del sax.lines[:]
        nvars = len(vars)
        varwidth = .8 / nvars / 1.1
        po = np.arange(24) + 0.1 + varwidth / 2
        ncolors = max(nvars, 10)  # must be an int for range() below
        try:
            from cycler import cycler
            props = cycler('color', [
                plt.get_cmap()((nc + .5) / float(ncolors))
                for nc in range(ncolors)
            ])
        except ImportError:
            props = [
                dict(color=plt.get_cmap()((nc + .5) / float(ncolors)))
                for nc in range(ncolors)
            ]
        for vi, (var, propd) in enumerate(zip(hvars, props)):
            varb = sax.boxplot(var,
                               positions=po + vi * varwidth * 1.1,
                               widths=varwidth,
                               patch_artist=True)
            plt.setp([i for i in varb.values()], **propd)
            plt.setp(varb['medians'], color='k')
            plt.setp(varb['fliers'], markeredgecolor=propd['color'])
        sax.set_xlim(-.5, 24.5)
        sax.set_xticks(range(0, 25))
        sax.set_xticklabels([str(i) for i in range(0, 25)])
        #plt.setp(sax.xaxis.get_ticklabels(),rotation = 45)
        figpath = args.outpath + target + '.' + args.figformat
        fig.savefig(figpath)
        if args.verbose > 0: print('Saved fig', figpath)
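
The offset arithmetic above packs one box per input file into each hour slot: boxes are .8 / nvars / 1.1 wide, the first file's boxes are centered at hour + 0.1 + varwidth / 2, and each additional file shifts right by varwidth * 1.1. A quick check of the layout for a hypothetical two-file case:

import numpy as np

nvars = 2
varwidth = .8 / nvars / 1.1              # ~0.36
po = np.arange(24) + 0.1 + varwidth / 2  # centers for the first file
for vi in range(nvars):
    centers = po + vi * varwidth * 1.1
    print(vi, centers[:2])
# file 0 centers near hour + 0.28 and file 1 near hour + 0.68,
# so both boxes stay inside the [hour, hour + 1) slot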
Example No. 4
def writearlpackedbit(infile, path):
    """
    path - path to existing arl packed bit file or location for new file
    infile - NetCDF-like file with
        - vertical 2-D (surface) and 3-D (layers) variables
          with 4 character names
        - a z variable with vertical coordinates
        - all properties from the first index record

    """
    requiredkeys = list(thdtype.names)
    for key in requiredkeys:
        getattr(infile, key)

    svars = {}
    lvars = {}
    props = {}
    sfckeys = props['sfckeys'] = []
    laykeys = props['laykeys'] = []
    for vark, var in infile.variables.items():
        if len(var.shape) == 3:
            svars[vark] = var
            sfckeys.append(vark.encode('ascii'))
        elif len(var.shape) == 4:
            lvars[vark] = var
            lvar = var
            laykeys.append(vark.encode('ascii'))
        else:
            pass

    vglvls = np.append(float(infile.SFCVGLVL),
                       infile.variables['z'][:].array())
    # vgtxts = getvgtxts(vglvls)
    # plus one includes surface
    props['NZ'] = lvar.shape[1] + 1
    props['NY'] = lvar.shape[2]
    props['NX'] = lvar.shape[3]
    datamap = maparlpackedbit(path,
                              mode='write',
                              shape=(lvar.shape[0], ),
                              **props)

    theads = datamap['timehead']
    # for key in thdtype.names:
    #    theads[key] = getattr(infile, key)

    # vardefs = datamap['vardef']
    # for ti, vardef in enumerate(vardefs):
    #     sfcvardeftxt = vgtxts[0] + '%2d' % len(svars) +
    #                    ''.join(['%-4s -1 ' % skey.decode()
    #                             for skey in sfckeys])
    #     layvardeftxt = ''
    #     for vgtxt in vgtxts[1:]:
    #         layvardeftxt += vgtxt + '%2d' % len(lvars) +
    #                         ''.join(['%-4s -1 ' % lkey.decode()
    #                                  for lkey in laykeys])
    #
    #
    # vardeftxt = sfcvardeftxt + layvardeftxt
    # defsize = 8+8*len(svars)+(8+8*len(lvars))*len(vgtxts[1:])
    # assert(len(vardeftxt) == defsize)
    # vardefs[:] = vardeftxt
    YYMMDDHHFF = getattr(infile, 'YYMMDDHHFF', '0000000000')
    FF = YYMMDDHHFF[-2:]
    if not isinstance(FF, bytes):
        # the time string below is bytes, so the forecast-hour suffix must be too
        FF = FF.encode('ascii')
    times = gettimes(infile)

    checksums = {}
    # these header fields are written explicitly below rather than copied
    _skipprop = ('YYMMDDHHFF', 'LEVEL', 'EXP', 'PREC', 'VAR1')

    for ti, (time, thead) in enumerate(zip(times, theads)):
        for propk in thead.dtype.names:
            if propk in ('NX', 'NY', 'NZ'):
                thead[propk] = '%3d' % props[propk]
            elif propk == 'LENH':
                thead[propk] = '%4d' % datamap['vardef'][ti].itemsize
            else:
                thead[propk] = getattr(infile, propk)
        timestr = time.strftime('%y%m%d%H').encode('ascii') + FF
        thead['YYMMDDHHFF'] = timestr
        for sfck in sfckeys:
            invar = infile.variables[sfck.decode()]
            var_time = datamap['surface'][sfck.decode()]
            varhead = var_time['head']

            for varpropk in varhead.dtype.names:
                if varpropk not in _skipprop:
                    varhead[varpropk][ti] = getattr(invar, varpropk)

            indata = invar[ti]
            CVAR, PREC, NEXP, VAR1, KSUM = pack2d(indata, verbose=False)

            varhead['YYMMDDHHFF'][ti] = timestr
            varhead['LEVEL'][ti] = '%2d' % 0
            varhead['PREC'][ti] = '%14.7E' % PREC
            varhead['EXP'][ti] = '%4d' % NEXP
            varhead['VAR1'][ti] = '%14.7E' % VAR1
            checksums[vglvls[0], sfck] = KSUM
            var_time['data'][ti] = CVAR
        for layk in laykeys:
            invar = infile.variables[layk.decode()]
            var_time = datamap['layers'][layk.decode()][ti]
            for li, var_time_lay in enumerate(var_time):
                varhead = var_time_lay['head']
                for varpropk in varhead.dtype.names:
                    if varpropk not in _skipprop:
                        varhead[varpropk] = getattr(invar, varpropk)

                indata = invar[ti, li]
                CVAR, PREC, NEXP, VAR1, KSUM = pack2d(indata)

                varhead['YYMMDDHHFF'] = timestr
                varhead['LEVEL'] = '%2d' % (li + 1)
                var_time_lay['data'] = CVAR
                varhead['PREC'] = '%14.7E' % PREC
                varhead['EXP'] = '%4d' % NEXP
                varhead['VAR1'] = '%14.7E' % VAR1
                vglvl = vglvls[li + 1]
                checksums[vglvl, layk] = KSUM

        keys = {vglvls[0]: sfckeys}
        for vglvl in vglvls[1:]:
            keys[vglvl] = laykeys
        vardef = writevardef(vglvls, keys, checksums)

        datamap['vardef'][ti] = ' '.ljust(datamap['vardef'][ti].itemsize)
        datamap['hdr'][ti] = ' '.ljust(datamap['hdr'][ti].itemsize)
        datamap['vardef'][ti] = vardef.encode('ascii')
        thead['LENH'] = datamap['vardef'][ti].itemsize

    datamap.flush()
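
A hedged usage sketch for writearlpackedbit: per the docstring, infile must already carry every attribute named in thdtype.names (the ARL index-record properties), 4-character surface and layer variables, and a z coordinate. Assuming such a file and PseudoNetCDF's pncopen, the call itself is short; both paths below are hypothetical.

from PseudoNetCDF import pncopen  # assumed reader entry point

infile = pncopen('arl_ready.nc', format='netcdf')  # hypothetical prepared input
writearlpackedbit(infile, 'packed.arl')            # hypothetical output path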
Example No. 5
def plot(ifiles, args):
    from PseudoNetCDF.coordutil import getsigmamid, getpresmid, gettimes
    import pylab as pl
    from pylab import figure, NullFormatter, close, rcParams
    rcParams['text.usetex'] = False
    from matplotlib.colors import LinearSegmentedColormap, BoundaryNorm, LogNorm
    scale = args.scale
    minmax = eval(args.minmax)
    minmaxq = eval(args.minmaxq)
    sigma = args.sigma
    maskzeros = args.maskzeros
    outunit = args.outunit
    tespaths = args.tespaths
    omipaths = args.omipaths
    edges = args.edges
    try:
        f, = ifiles
    except ValueError:
        raise ValueError(
            'curtain plot expects exactly one file; '
            'try --stack=time to concatenate inputs first')

    # Add CF conventions if necessary
    if 'latitude_bounds' not in f.variables.keys():
        try:
            from PseudoNetCDF import getvarpnc
            from PseudoNetCDF.conventions.ioapi import add_cf_from_ioapi
            f = getvarpnc(f, None)
            add_cf_from_ioapi(f)
        except:
            pass
    if sigma:
        vertcrd = getsigmamid(f)
    else:
        vertcrd = getpresmid(f, pref = 101325., ptop = getattr(f, 'VGTOP', 10000))
        if vertcrd.max() > 2000:  vertcrd /= 100.

    try:
        lonb = f.variables['geos_longitude_bounds']
        latb = f.variables['geos_latitude_bounds']
    except:
        lonb = f.variables['longitude_bounds']
        latb = f.variables['latitude_bounds']
    for var_name in args.variables:
        temp = defaultdict(lambda: 1)
        try:
            eval(var_name, None, temp)
            var = eval(var_name, None, f.variables)[:]
        except:
            temp[var_name]
            var = f.variables[var_name][:]
        if maskzeros: var = np.ma.masked_values(var, 0)
        unit = f.variables[list(temp.keys())[0]].units.strip()
        if unit in unitconvert:
            var = unitconvert.get((unit, outunit), lambda x: x)(var)
        else:
            outunit = unit
        bmap = None
        vmin, vmax = np.percentile(np.ma.compressed(var).ravel(), list(minmaxq))
        if minmax[0] is not None:
            vmin = minmax[0]
        if minmax[1] is not None:
            vmax = minmax[1]
        if edges:
            fig = pl.figure(figsize = (16, 4))
            offset = 0.05
            ax = fig.add_axes([.1 - offset, .15, .22, .725])
            ax = fig.add_axes([.325 - offset, .15, .22, .725])
            ax = fig.add_axes([.55 - offset, .15, .22, .725])
            ax = fig.add_axes([.775 - offset, .15, .22, .725])
            ss = 0
            se = ss + f.NCOLS + 1
            es = se
            ee = se + f.NROWS + 1
            ns = ee
            ne = ee + f.NCOLS + 1
            ws = ne
            we = ws + f.NROWS + 1
            axs = fig.axes
            for ax in fig.axes[1:]:
                ax.yaxis.set_major_formatter(pl.NullFormatter())
            
            vars = [var[:, :, ss:se], var[:, :, es:ee], var[:, :, ns:ne][:, :, ::-1], var[:, :, ws:we][:, :, ::-1]]
            lonbss = [lonb[ss:se], lonb[es:ee], lonb[ns:ne][::-1], lonb[ws:we][::-1]]
            latbss = [latb[ss:se], latb[es:ee], latb[ns:ne][::-1], latb[ws:we][::-1]]
            
        else:
            fig = pl.figure(figsize = (8, 4))
            ax = fig.add_axes([.1, .15, .8, .725])
            axs = fig.axes
            vars = [var]
            lonbss = [lonb[:]]
            latbss = [latb[:]]
        for ax, var, lonbs, latbs in zip(axs, vars, lonbss, latbss):
            vals = var.swapaxes(0, 1).reshape(var.shape[1], -1)
            ax.text(.05, .9, 'n = %d' % vals.shape[1], transform = ax.transAxes)
            modl, modr = minmaxmean(ax, vals, vertcrd, facecolor = 'k', edgecolor = 'k', alpha = .2, zorder = 4, label = 'GC', ls = '-', lw = 2, color = 'k')
            llines = [(modl, modr)]
            ymin, ymax = vertcrd.min(), vertcrd.max()
            ax.set_ylim(ymax, ymin)
            ax.set_xscale(scale)
            ax.set_xlim(vmin, vmax)
            #if scale == 'log':
            #    ax.set_xticklabels(['%.1f' % (10**x) for x in ax.get_xticks()])
            
            if 'TFLAG' in f.variables.keys():
                SDATE = f.variables['TFLAG'][:][0, 0, 0]
                EDATE = f.variables['TFLAG'][:][-1, 0, 0]
                STIME = f.variables['TFLAG'][:][0, 0, 1]
                ETIME = f.variables['TFLAG'][:][-1, 0, 1]
                if SDATE == 0:
                    SDATE = 1900001
                    EDATE = 1900001
                sdate = datetime.strptime('%07d %06d' % (SDATE, STIME), '%Y%j %H%M%S')
                edate = datetime.strptime('%07d %06d' % (EDATE, ETIME), '%Y%j %H%M%S')
            elif 'tau0' in f.variables.keys():
                sdate = datetime(1985, 1, 1, 0) + timedelta(hours = f.variables['tau0'][0])
                edate = datetime(1985, 1, 1, 0) + timedelta(hours = f.variables['tau1'][-1])
            else:
                times = gettimes(f)
                sdate = times[0]
                edate = times[-1]

            if len(tespaths) > 0:
                tesl, tesr = plot_tes(ax, lonbs, latbs, tespaths)
                if not tesl is None:
                    llines.append((tesl, tesr))
            if len(omipaths) > 0:
                omil, omir = plot_omi(ax, lonbs, latbs, omipaths, airden = f.variables['AIRDEN'][:].mean(0).mean(1), airdenvert = vertcrd)
                if not omil is None:
                    llines.append((omil, omir))

        try:
            title = '%s to %s' % (sdate.strftime('%Y-%m-%d'), edate.strftime('%Y-%m-%d'))
        except:
            title = var_name
        if sigma:
            axs[0].set_ylabel('sigma')
        else:
            axs[0].set_ylabel('pressure')
            
        xmax = -np.inf
        xmin = np.inf
        for ax in fig.axes:
            tmp_xmin, tmp_xmax = ax.get_xlim()
            xmax = max(tmp_xmax, xmax)
            xmin = min(tmp_xmin, xmin)
        for ax in fig.axes:
            ax.set_xlim(xmin, xmax)
            
        if len(axs) == 1:
            axs[0].set_xlabel('%s %s' % (var_name, outunit))
        else:
            axs[0].set_xlabel('South')
            axs[1].set_xlabel('East')
            axs[2].set_xlabel('North')
            axs[3].set_xlabel('West')
            fig.text(.5, .90, '%s %s' % (var_name, outunit), horizontalalignment = 'center', fontsize = 16)
        nl = 0
        for ax in axs:
            if len(ax.get_lines()) > nl:
                nl = len(ax.get_lines())
                pl.sca(ax)
        
        llabels = [l[0].get_label() for l in llines]
        pl.legend(llines, llabels, bbox_to_anchor = (.1, 1), loc = 'upper left', bbox_transform = fig.transFigure, ncol = 6)
        if edges:
            fig.text(0.95, 0.975, title, horizontalalignment = 'right', verticalalignment = "top", fontsize = 16)
        else:
            fig.text(0.95, 0.025, title, horizontalalignment = 'right', verticalalignment = "bottom", fontsize = 16)
        fig.savefig('%s_%s.%s' % (args.outpath, var_name, args.figformat))
        pl.close(fig)
    return fig
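
Because plot() passes args.minmax and args.minmaxq through eval, those two options travel as strings. A hedged driver sketch, assuming the module-level helpers this function uses (minmaxmean, unitconvert, plot_tes, plot_omi, datetime, timedelta) are importable, with hypothetical paths and variable names:

from argparse import Namespace
from PseudoNetCDF import pncopen  # assumed reader entry point

f = pncopen('conc.nc', format='netcdf')  # hypothetical single input file
args = Namespace(variables=['O3'], scale='log',
                 minmax='(None, None)', minmaxq='(1, 99)',  # strings; they are eval'd
                 sigma=False, maskzeros=False, outunit='ppb',
                 tespaths=[], omipaths=[], edges=False,
                 outpath='curtain', figformat='png')
fig = plot([f], args)  # writes curtain_O3.png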
Example No. 6
def pncdump(f,
            name='unknown',
            header=False,
            variables=[],
            line_length=80,
            full_indices=None,
            float_precision=8,
            double_precision=16,
            isgroup=False,
            timestring=False,
            outfile=sys.stdout):
    """
    pncdump is designed to implement basic functionality
    of the NetCDF ncdump binary.
    
    f         - a PseudoNetCDFFile object
    name      - string name for the file 
                (equivalent to ncdump -n name)
    header    - boolean value for display of header only
                (equivalent to ncdump -h)
    variables - iterable of variable names for subsetting
                data display (equivalent to ncdump -v var[,...])

    pncdump(vertical_diffusivity('camx_kv.20000825.hgbpa_04km.TCEQuh1_eta.v43.tke',rows=65,cols=83))
    """
    file_type = str(type(f)).split("'")[1]
    float_fmt = "%%.%dg" % (float_precision, )
    double_fmt = "%%.%dg" % (double_precision, )
    int_fmt = "%i"
    formats = defaultdict(lambda: "%s",
                   float = double_fmt, \
                   float64 = double_fmt, \
                   float32 = float_fmt, \
                   int32 = "%i", \
                   uint32 = "%i", \
                   int64 = "%i", \
                   str = "%s", \
                   bool = "%s", \
                   string8 = "'%s'")

    funcs = dict()  # e.g., float=lambda x: double_fmt % x

    # initialize indentation as 8 characters based on ncdump
    indent = 8 * " "
    if isgroup:
        startindent = 4 * " "
    else:
        startindent = ""

    # First line of CDL
    if not isgroup:
        outfile.write("%s %s {\n" % (file_type, name))

    ###########################
    # CDL Section 1: dimensions
    ###########################
    outfile.write(startindent + "dimensions:\n")
    for dim_name, dim in f.dimensions.items():
        if dim.isunlimited():
            outfile.write(startindent + 1 * indent +
                          ("%s = UNLIMITED // (%s currently) \n" %
                           (dim_name, len(dim))))
        else:
            outfile.write(startindent + 1 * indent + ("%s = %s ;\n" %
                                                      (dim_name, len(dim))))

    ###################################
    # CDL Section 2: variables metadata
    ###################################
    if len(f.variables.keys()) > 0:
        outfile.write("\n" + startindent + "variables:\n")
    for var_name, var in f.variables.items():
        var_type = dict(float32='float', \
                        float64='double', \
                        int32='integer', \
                        uint32='integer', \
                        int64='long', \
                        bool='bool', \
                        string8='char', \
                        string80='char').get(var.dtype.name, var.dtype.name)
        outfile.write(startindent + 1 * indent +
                      ("%s %s%s;\n" %
                       (var_type, var_name, str(var.dimensions).replace(
                           'u\'', '').replace('\'', '').replace(',)', ')'))))
        for prop_name in var.ncattrs():
            prop = getattr(var, prop_name)
            outfile.write(startindent + 2 * indent + (
                "%s:%s = %s ;\n" %
                (var_name, prop_name, repr(prop).replace("'", '"'))))

    ################################
    # CDL Section 3: global metadata
    ################################
    outfile.write("\n\n// global properties:\n")
    for prop_name in f.ncattrs():
        prop = getattr(f, prop_name)
        outfile.write(startindent + 2 * indent +
                      (":%s = %s ;\n" %
                       (prop_name, repr(prop).replace("'", '"'))))

    if hasattr(f, 'groups'):
        for group_name, group in f.groups.items():
            outfile.write(startindent + 'group %s:\n' % group_name)
            pncdump(group,
                    name=name,
                    header=header,
                    variables=variables,
                    line_length=line_length,
                    full_indices=full_indices,
                    float_precision=float_precision,
                    double_precision=double_precision,
                    isgroup=True)
    if not header:
        # Error trapping prevents the user from getting an error
        # when they cancel a dump or when they break a redirected
        # pipe
        try:
            #####################
            # CDL Section 4: data
            #####################
            outfile.write("\n\n" + startindent + "data:\n")

            # data indentation is only 1 space
            indent = " "

            # Subset variables for output
            display_variables = [
                var_name for var_name in f.variables.keys()
                if var_name in variables or variables == []
            ]
            if variables != []:
                if len(display_variables) < len(variables):
                    warn("Not all specified variables were available")

            # For each variable output data
            # currently assumes 3-D data
            for var_name in display_variables:
                var = f.variables[var_name]
                if isinstance(var, PseudoNetCDFMaskedVariable) or hasattr(
                        var, '_FillValue'):

                    def writer(row, last):
                        if isscalar(row) or row.ndim == 0:
                            outfile.write(startindent + '  ' +
                                          str(row.filled().astype(ndarray)) +
                                          ';\n')
                            return
                        #old = get_printoptions()
                        #set_printoptions(threshold = inf, linewidth = line_length)
                        #tmpstr =  startindent + '    ' + array2string(row, separator = commaspace, formatter = funcs).replace('\n', '\n' + startindent + '    ')[1:-1].replace('--', '_')
                        #if last:
                        #    tmpstr += ';'
                        #else:
                        #    tmpstr += commaspace
                        #set_printoptions(**old)
                        #outfile.write(tmpstr)
                        #outfile.write('\n')

                        tmpstr = StringIO()
                        if ma.getmaskarray(row).all():
                            tmpstr.write(b', '.join([b'_'] * row.size) + b', ')
                        else:
                            savetxt(tmpstr,
                                    ma.filled(row),
                                    fmt,
                                    delimiter=commaspace,
                                    newline=commaspace)
                        if last:
                            tmpstr.seek(-2, 1)
                            tmpstr.write(semicolon)
                        tmpstr.seek(0, 0)
                        tmpstr = tmpstr.read().decode('ASCII')
                        tmpstr = tmpstr.replace(
                            fmt % getattr(row, 'fill_value', 0) + ',', '_,')
                        tmpstr = textwrap.fill(
                            tmpstr,
                            line_length,
                            initial_indent=startindent + '  ',
                            subsequent_indent=startindent + '    ')
                        try:
                            outfile.write(tmpstr)
                            outfile.write('\n')
                        except Exception as e:
                            exception_handler(e, outfile)
                else:

                    def writer(row, last):
                        if isscalar(row) or row.ndim == 0:
                            outfile.write(startindent + '  ' +
                                          str(row.astype(ndarray)) + ';\n')
                            return
                        old = get_printoptions()
                        set_printoptions(threshold=inf, linewidth=line_length)
                        tmpstr = startindent + '    ' + array2string(
                            row, separator=commaspace,
                            formatter=funcs).replace(
                                '\n', '\n' + startindent + '    ')[1:-1]
                        #tmpstr = StringIO()
                        #savetxt(tmpstr, row, fmt, delimiter = commaspace, newline =commaspace)
                        if last:
                            tmpstr += ';'
                            #tmpstr.seek(-2, 1)
                            #tmpstr.write(semicolon)
                        else:
                            tmpstr += commaspace
                        #tmpstr.seek(0, 0)
                        #outfile.write(textwrap.fill(str(tmpstr.read()), line_length, initial_indent = startindent + '  ', subsequent_indent = startindent + '    '))
                        set_printoptions(**old)
                        try:
                            outfile.write(tmpstr)
                            outfile.write('\n')
                        except Exception as e:
                            exception_handler(e, outfile)

                outfile.write(startindent + 1 * indent + ("%s =\n" % var_name))
                if var_name in ('time', 'time_bounds') and timestring:
                    from PseudoNetCDF.coordutil import gettimes, gettimebnds
                    if var_name == 'time':
                        times = gettimes(f)
                    elif var_name == 'time_bounds':
                        times = gettimebnds(f)

                    for i in ndindex(var.shape):
                        val = var[i]
                        if ma.is_masked(val):
                            array_str = '_'
                        else:
                            array_str = startindent + 2 * indent + str(val)

                        if i == tuple(map(lambda x_: x_ - 1, var.shape)):
                            array_str += ";"
                        else:
                            array_str += ","

                        array_str += " // %s%s %s \n" % (var_name, i,
                                                         str(times[i]))
                        try:
                            outfile.write(array_str)
                        except Exception as e:
                            exception_handler(e, outfile)
                elif full_indices is not None:
                    id_display = {'f': lambda idx: str(tuple([idx[i]+1 for i in range(len(idx)-1,-1,-1)])), \
                                  'c': lambda idx: str(idx)}[full_indices]

                    #for i, val in ndenumerate(var):
                    for i in ndindex(var.shape):
                        val = var[i]
                        if ma.is_masked(val):
                            array_str = '_'
                        else:
                            fmt = startindent + 2 * indent + formats[
                                var.dtype.name]
                            array_str = fmt % val

                        if i == tuple(map(lambda x_: x_ - 1, var.shape)):
                            array_str += ";"
                        else:
                            array_str += ","

                        array_str += " // %s%s \n" % (var_name, id_display(i))
                        try:
                            outfile.write(array_str)
                        except Exception as e:
                            exception_handler(e, outfile)
                else:
                    dimensions = [len(f.dimensions[d]) for d in var.dimensions]
                    if len(dimensions) > 1:
                        first_dim = prod(dimensions[:-1])
                        second_dim = dimensions[-1]
                        shape = [first_dim, second_dim]
                    else:
                        shape = [1] + dimensions
                    var2d = var[...].reshape(*shape)
                    fmt = formats[var.dtype.name]
                    lastrow = var2d.shape[0] - 1
                    for rowi, row in enumerate(var2d):
                        try:
                            writer(row, rowi == lastrow)
                        except Exception as e:
                            exception_handler(e, outfile)

        except Exception as e:
            exception_handler(e, outfile)

    outfile.write("}\n")
    return outfile
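
pncdump writes CDL to outfile and returns it, so it can be pointed at sys.stdout or any open text handle. A short sketch, assuming pncopen and using a hypothetical file and variable name:

import sys
from PseudoNetCDF import pncopen  # assumed reader entry point

f = pncopen('example.nc', format='netcdf')   # hypothetical file
pncdump(f, name='example', header=True)      # header only, like ncdump -h
pncdump(f, name='example', variables=['O3'],
        outfile=sys.stdout)                  # one variable's data, like ncdump -v O3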
Example No. 7
def pnceval(args):
    from warnings import warn
    from PseudoNetCDF.core._functions import pncbfunc
    if args.variables is None:
        args.variables = set(args.ifiles[0].variables.keys()).difference(
            args.coordkeys)
    console = createconsole(args.ifiles, args)
    ifile0, ifile1 = args.ifiles
    from PseudoNetCDF.coordutil import gettimes
    print('# ifile0=' + args.ipath[0])
    print('# ifile1=' + args.ipath[1])
    print('# Stats calculated as func(ifile0, ifile1)')
    print('# Generally: func(obs, mod, ....)')
    for ipath, ifile in zip(args.ipath, args.ifiles):
        print('# Meta-data from %s' % ipath)
        try:
            times = gettimes(ifile)
            tstart = times[:].min()
            tstop = times[:].max()
            print('# Date Range: ' + str(tstart) + ' to ' + str(tstop))
        except Exception as e:
            warn(str(e))
        try:
            lon = ifile.variables['longitude']
            print('# Longitude Range: ' + str(float(lon.min())) + ' to ' +
                  str(float(lon.max())))
        except Exception as e:
            warn(str(e))
        try:
            lat = ifile.variables['latitude']
            print('# Latitude Range: ' + str(float(lat.min())) + ' to ' +
                  str(float(lat.max())))
        except Exception as e:
            warn(str(e))

    np.seterr(divide='ignore', invalid='ignore')
    from collections import OrderedDict

    # created unconditionally; the loop below stores per-function results in it
    output = OrderedDict()
    for k in args.funcs:
        console.locals[k] = func = eval(k)
        output[k] = OrderedDict()
        if args.csv:
            print('# %s: %s' % (k, func.__doc__.strip()))
        try:
            console.locals[k + '_f'] = ofile = pncbfunc(func, ifile0, ifile1)
        except Exception as e:
            warn("Skipped " + k + ';' + str(e))
            continue
        for vk in args.variables:
            if vk in ('time', 'TFLAG'):
                continue
            if args.csv:
                outv = ofile.variables[vk][:]
                outv = np.ma.array(outv)
                output[k][vk] = outv.ravel()[0]
            else:
                print('%s,%s,%s,%f' % (vk, func.__doc__.strip(), k,
                                       ofile.variables[vk].ravel()[0]))
    if args.csv:
        print(','.join(['VAR'] + args.funcs))
        for vk in args.variables:
            print(','.join(
                [vk] +
                ['%f' % output[fk].get(vk, np.nan) for fk in args.funcs]))

    np.seterr(divide='warn', invalid='warn')
    if args.interactive:
        console.interact()
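
pnceval compares exactly two files (ifile0 as observations, ifile1 as model) and resolves each name in args.funcs with eval, so the names must refer to statistics functions visible in pnceval's namespace; the available set depends on the PseudoNetCDF version. A rough sketch with hypothetical paths and hypothetical function names (depending on the version, createconsole may expect additional args attributes):

from argparse import Namespace
from PseudoNetCDF import pncopen  # assumed reader entry point

obs = pncopen('obs.nc', format='netcdf')  # hypothetical observation file
mod = pncopen('mod.nc', format='netcdf')  # hypothetical model file
args = Namespace(ifiles=[obs, mod], ipath=['obs.nc', 'mod.nc'],
                 variables=None, coordkeys=['time', 'latitude', 'longitude'],
                 funcs=['MB', 'RMSE'],    # hypothetical statistic names
                 csv=True, interactive=False)
pnceval(args)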
Example No. 8
def pncdump(f, name = 'unknown', header = False, variables = [], line_length = 80, full_indices = None, float_precision = 8, double_precision = 16, isgroup = False, timestring = False, outfile = sys.stdout):
    """
    pncdump is designed to implement basic functionality
    of the NetCDF ncdump binary.
    
    f         - a PseudoNetCDFFile object
    name      - string name for the file 
                (equivalent to ncdump -n name)
    header    - boolean value for display of header only
                (equivalent to ncdump -h)
    variables - iterable of variable names for subsetting
                data display (equivalent to ncdump -v var[,...])

    pncdump(vertical_diffusivity('camx_kv.20000825.hgbpa_04km.TCEQuh1_eta.v43.tke',rows=65,cols=83))
    """
    file_type = str(type(f)).split("'")[1]
    formats = defaultdict(lambda: "%s", float64 = "%%.%de" % (double_precision,), \
                   float32 = "%%.%de" % (float_precision,), \
                   int32 = "%i", \
                   uint32 = "%i", \
                   int64 = "%i", \
                   str = "%s", \
                   bool = "%s", \
                   string8 = "'%s'")
    
    float_fmt = "%%.%df" % (float_precision,)
    int_fmt = "%i"
    # initialize indentation as 8 characters
    # based on ncdump
    indent = 8*" "
    if isgroup:
        startindent = 4*" "
    else:
        startindent = 4*""
        
    # First line of CDL
    if not isgroup: outfile.write("%s %s {\n" % (file_type, name,))
    
    ###########################
    # CDL Section 1: dimensions
    ###########################
    outfile.write(startindent + "dimensions:\n")
    for dim_name, dim in f.dimensions.items():
        if dim.isunlimited():
            outfile.write(startindent + 1*indent+("%s = UNLIMITED // (%s currently) \n" % (dim_name,len(dim))))
        else:
            outfile.write(startindent + 1*indent+("%s = %s ;\n" % (dim_name,len(dim))))
    
    ###################################
    # CDL Section 2: variables metadata
    ###################################
    if len(f.variables.keys()) > 0:
        outfile.write("\n" + startindent + "variables:\n")
    for var_name, var in f.variables.items():
        var_type = dict(float32='float', \
                        float64='double', \
                        int32='integer', \
                        uint32='integer', \
                        int64='long', \
                        bool='bool', \
                        string8='char', \
                        string80='char').get(var.dtype.name, var.dtype.name)
        outfile.write(startindent + 1*indent+("%s %s%s;\n" % (var_type, var_name,str(var.dimensions).replace('u\'', '').replace('\'','').replace(',)',')'))))
        for prop_name in var.ncattrs():
            prop = getattr(var, prop_name)
            outfile.write(startindent + 2*indent+("%s:%s = %s ;\n" % (var_name,prop_name,repr(prop).replace("'", '"'))))
    
    ################################
    # CDL Section 3: global metadata
    ################################
    outfile.write("\n\n// global properties:\n")
    for prop_name in f.ncattrs():
        prop = getattr(f, prop_name)
        outfile.write(startindent + 2*indent+(":%s = %s ;\n" % (prop_name, repr(prop).replace("'",'"'))))

    if hasattr(f, 'groups'):
        for group_name, group in f.groups.items():
            outfile.write(startindent + 'group %s:\n' % group_name)
            pncdump(group, name = name, header = header, variables = variables, line_length = line_length, full_indices = full_indices, float_precision = float_precision, double_precision = double_precision, isgroup = True)
    if not header:
        # Error trapping prevents the user from getting an error
        # when they cancel a dump or when they break a redirected
        # pipe
        try:
            #####################
            # CDL Section 4: data
            #####################
            outfile.write("\n\n" + startindent + "data:\n")
            
            # data indentation is only 1 space
            indent = " "
            
            # Subset variables for output
            display_variables = [var_name for var_name in f.variables.keys() if var_name in variables or variables == []]
            if variables != []:
                if len(display_variables) < len(variables):
                    warn("Not all specified variables were available")
            
            # For each variable output data 
            # currently assumes 3-D data
            for var_name in display_variables:
                var = f.variables[var_name]
                if isinstance(var, PseudoNetCDFMaskedVariable) or hasattr(var, '_FillValue'):
                    def writer(row, last):
                        if isscalar(row) or row.ndim == 0:
                            outfile.write(startindent + '  ' + str(row.filled().astype(ndarray)))
                            return
                        tmpstr = StringIO(bytes('', 'utf-8'))
                        if ma.getmaskarray(row).all():
                            tmpstr.write(', '.join(['_'] * row.size) + ', ')
                        else:
                            savetxt(tmpstr, ma.filled(row), fmt, delimiter = ', ', newline =', ')
                        if last:
                            tmpstr.seek(-2, 1)
                            tmpstr.write(b';')
                        tmpstr.seek(0, 0)
                        tmpstr = tmpstr.read()
                        tmpstr = tmpstr.replace(bytes(fmt % getattr(row, 'fill_value', 0) + ',', 'utf-8'), bytes('_,', 'utf-8'))
                        outfile.write(textwrap.fill(tmpstr.decode('utf-8'), line_length, initial_indent = startindent + '  ', subsequent_indent = startindent + '    '))
                        outfile.write('\n')
                else:
                    def writer(row, last):
                        if isscalar(row) or row.ndim == 0:
                            outfile.write(startindent + '  ' + str(row.astype(ndarray)))
                            return
                        tmpstr = StringIO(bytes('', 'utf-8'))
                        savetxt(tmpstr, row, fmt, delimiter = ', ', newline =', ')
                        if last:
                            tmpstr.seek(-2, 1)
                            tmpstr.write(';')
                        tmpstr.seek(0, 0)
                        outfile.write(textwrap.fill(tmpstr.read().decode('utf-8'), line_length, initial_indent = startindent + '  ', subsequent_indent = startindent + '    '))
                        outfile.write('\n')
                        
                        
                outfile.write(startindent + 1*indent+("%s =\n" % var_name))
                if var_name in ('time', 'time_bounds') and timestring:
                    from PseudoNetCDF.coordutil import gettimes, gettimebnds
                    if var_name == 'time':
                        times = gettimes(f)
                    elif var_name == 'time_bounds':
                        times = gettimebnds(f)
                    
                    for i in ndindex(var.shape):
                        val = var[i]
                        if ma.is_masked(val):
                            array_str = '_'
                        else:
                            array_str = startindent + 2*indent + str(val)

                        if i == tuple(map(lambda x_: x_ - 1, var.shape)):
                            array_str += ";"
                        else:
                            array_str += ","

                        array_str += " // %s%s %s \n" % (var_name, i, str(times[i]))
                        try:
                            outfile.write(array_str)
                        except IOError:
                            outfile.close()
                            exit()
                elif full_indices is not None:
                    id_display = {'f': lambda idx: str(tuple([idx[i]+1 for i in range(len(idx)-1,-1,-1)])), \
                                  'c': lambda idx: str(idx)}[full_indices]
                                  
                    #for i, val in ndenumerate(var):
                    for i in ndindex(var.shape):
                        val = var[i]
                        if ma.is_masked(val):
                            array_str = '_'
                        else:
                            fmt = startindent + 2*indent+formats[var.dtype.name]
                            array_str = fmt % val

                        if i == tuple(map(lambda x_: x_ - 1, var.shape)):
                            array_str += ";"
                        else:
                            array_str += ","

                        array_str += " // %s%s \n" % (var_name, id_display(i))
                        try:
                            outfile.write(array_str)
                        except IOError:
                            outfile.close()
                            exit()
                else:
                    dimensions = [len(f.dimensions[d]) for d in var.dimensions]
                    if len(dimensions) > 1:
                        first_dim = prod(dimensions[:-1])
                        second_dim = dimensions[-1]
                        shape = [first_dim, second_dim]
                    else:
                        shape = [1]+dimensions
                    var2d = var[...].reshape(*shape)
                    fmt = formats[var.dtype.name]
                    lastrow = var2d.shape[0] - 1
                    for rowi, row in enumerate(var2d):
                        try:
                            writer(row, rowi == lastrow)
                                
                        except IOError as e:
                            warn(repr(e) + "; Typically from CTRL+C or exiting less")
                            exit()
                                            
                    
        except KeyboardInterrupt:
            outfile.flush()
            exit()
    outfile.write("}\n")
    return outfile