Example #1
def cdtoms(path):
    outfile = PseudoNetCDFFile()
    inlines = open(path, 'r').readlines()
    dayline = dayre.match(inlines[0]).groupdict()
    date = datetime.strptime(dayline['daystring'], '%b %d, %Y')
    lonline = lonre.match(inlines[1]).groupdict()
    latline = latre.match(inlines[2]).groupdict()
    for propdict in [dayline, lonline, latline]:
        for k,v in propdict.items():
            try:
                v = eval(v)
            except Exception:
                pass
            setattr(outfile, k, v)
    blat, bsn = outfile.blat.split()
    elat, esn = outfile.elat.split()
    blat = {'N': 1, 'S': -1}[bsn] * eval(blat)
    elat = {'N': 1, 'S': -1}[esn] * eval(elat)

    blon, bwe = outfile.blon.split()
    elon, ewe = outfile.elon.split()
    blon = {'E': 1, 'W': -1}[bwe] * eval(blon)
    elon = {'E': 1, 'W': -1}[ewe] * eval(elon)


    outfile.createDimension('LAT', outfile.latbins)
    outfile.createDimension('LON', outfile.lonbins)
    datalines = inlines[3:]
    lats = []
    for i, line in enumerate(datalines):
        if 'lat' not in line:
            datalines[i] = line[1:-1]
        else:
            data, lat = line.split('lat =')
            datalines[i] = data[1:-1].rstrip() + '\n'
            lats.append(lat.strip())
    
    n = 3
    
    datalines = ''.join(datalines).split('\n')
    var = outfile.createVariable('ozone', 'f', ('LAT', 'LON'))
    var.units = 'matm-cm'
    var.long_name = var.var_desc = 'ozone'.ljust(16)
    var[:] = array([[eval(s[n*i:n*i+n]) for i in range(len(s)//n)] for s in datalines if s.strip() != ''], 'f')

    var = outfile.createVariable('lat', 'f', ('LAT',))
    var.units = 'degrees N'
    var[:] = arange(blat, elat+outfile.latbinsize, outfile.latbinsize)

    var = outfile.createVariable('lon', 'f', ('LON',))
    var.units = 'degrees E'
    var[:] = arange(blon, elon+outfile.lonbinsize, outfile.lonbinsize)


    return outfile
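A hedged usage sketch (not part of the original example): assuming the module-level helpers (dayre, lonre, latre, array, arange, PseudoNetCDFFile) are in scope, the returned object behaves like any other PseudoNetCDFFile; the input path below is a placeholder.

ozf = cdtoms('toms_daily_grid.txt')        # hypothetical TOMS ASCII grid file
print(ozf.variables['ozone'].shape)        # (latbins, lonbins)
print(ozf.variables['lat'][:3], ozf.variables['lon'][:3])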
Example #2
def cdtoms(path):
    outfile = PseudoNetCDFFile()
    inlines = open(path, 'r').readlines()
    dayline = dayre.match(inlines[0]).groupdict()
    date = datetime.strptime(dayline['daystring'], '%b %d, %Y')
    lonline = lonre.match(inlines[1]).groupdict()
    latline = latre.match(inlines[2]).groupdict()
    for propdict in [dayline, lonline, latline]:
        for k, v in propdict.items():
            try:
                v = eval(v)
            except Exception:
                pass
            setattr(outfile, k, v)
    blat, bsn = outfile.blat.split()
    elat, esn = outfile.elat.split()
    blat = {'N': 1, 'S': -1}[bsn] * eval(blat)
    elat = {'N': 1, 'S': -1}[esn] * eval(elat)

    blon, bwe = outfile.blon.split()
    elon, ewe = outfile.elon.split()
    blon = {'E': 1, 'W': -1}[bwe] * eval(blon)
    elon = {'E': 1, 'W': -1}[ewe] * eval(elon)

    outfile.createDimension('LAT', outfile.latbins)
    outfile.createDimension('LON', outfile.lonbins)
    datalines = inlines[3:]
    lats = []
    for i, line in enumerate(datalines):
        if 'lat' not in line:
            datalines[i] = line[1:-1]
        else:
            data, lat = line.split('lat =')
            datalines[i] = data[1:-1].rstrip() + '\n'
            lats.append(lat.strip())

    n = 3

    datalines = ''.join(datalines).split('\n')
    var = outfile.createVariable('ozone', 'f', ('LAT', 'LON'))
    var.units = 'matm-cm'
    var.long_name = var.var_desc = 'ozone'.ljust(16)
    var[:] = array([[eval(s[n * i:n * i + n]) for i in range(len(s) // n)]
                    for s in datalines if s.strip() != ''], 'f')

    var = outfile.createVariable('lat', 'f', ('LAT', ))
    var.units = 'degrees N'
    var[:] = arange(blat, elat + outfile.latbinsize, outfile.latbinsize)

    var = outfile.createVariable('lon', 'f', ('LON', ))
    var.units = 'degrees E'
    var[:] = arange(blon, elon + outfile.lonbinsize, outfile.lonbinsize)

    return outfile
Example #3
def MorphoIRRt(irrpath):
    mrglines = open(irrpath).readlines()
    try:
        datelines = [l for l in mrglines if _split1 in l]
        datestr = datelines[0].split(_split1)[-1].strip()
        jday = int(datetime.strptime(datestr, '%d-%b-%y').strftime('%Y%j'))
    except Exception:
        warn('Could not find/parse date; using 1900001')
        jday = 1900001

    mrglines = [
        line for line in mrglines
        if line[:2] not in ('//', '**') and line not in ('', '\n')
    ]
    name_line = mrglines.pop(0)
    irrlabel = re.compile(r'rt\[\S+\]')
    rxn_names = [
        'IRR_%s' % name.replace('rt[', '').replace(']', '')
        for name in irrlabel.findall(name_line)
    ]
    name_line = ['N', 'T'] + rxn_names
    unit_line = mrglines.pop(0).split()
    unit_line = [unit for unit in unit_line]
    assert (all([eval(v) == 0. for v in mrglines.pop(0).split()][2:]))
    mrgfile = PseudoNetCDFFile()
    mrgfile.createDimension('TSTEP', len(mrglines))
    mrgfile.createDimension('DATE-TIME', 2)
    mrgfile.createDimension('VAR', 1)
    unit_dict = dict([(k, v) for k, v in zip(name_line, unit_line)])
    tflag = mrgfile.createVariable('TFLAG', 'f', ('TSTEP', 'VAR', 'DATE-TIME'))
    tflag.units = '<JDAY, MIN>'
    tflag.long_name = tflag.var_desc = 'TFLAG'
    tflag[:, :, 0] = jday
    for name in name_line:
        var = mrgfile.createVariable(name, 'f', ('TSTEP', ))
        var.units = unit_dict[name]
        var.long_name = var.var_desc = name
    for ti, line in enumerate(mrglines):
        for var_name, value in zip(name_line, line.split()):
            var = mrgfile.variables[var_name]
            var[ti] = float(value)

    for name in name_line:
        if name in ('T', 'N'):
            continue
        var = mrgfile.variables[name]
        var[1:] = (var[1:] - var[:-1]) * 1000.
    tflag[:, :, 1] = mrgfile.variables['T'][:, None]
    return mrgfile
Example #4
def shadoz(inpath):
    datafile = open(inpath, 'r')
    datalines = datafile.read().split('\n')
    nmeta = int(datalines[0])
    meta = dict([[w.strip() for w in l.split(': ')]
                 for l in datalines[1:nmeta - 2]])
    varline, unitline = datalines[nmeta - 2:nmeta]
    varnames = spaces.split(varline)
    units = spaces.split(unitline)
    data = np.fromstring('\n'.join(datalines[nmeta:]),
                         sep=' ').reshape(-1, len(varnames))
    outf = PseudoNetCDFFile()
    outf.createDimension('time', data.shape[0])
    missing = -9999
    for k, v in meta.items():
        setattr(outf, k, v)
        if k == 'Missing or bad values':
            missing = eval(v)

    for varname, unit, vals in zip(varnames, units, data.T):
        var = outf.createVariable(varname.replace(' ', '_'), 'f', ('time', ))
        var.units = unit
        var.standard_name = varname
        var[:] = np.ma.masked_values(vals[:], missing)

    return outf
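A hedged usage sketch (not from the original listing): the SHADOZ reader above returns a file whose variable names and units come from the two header lines of the sounding, so the names are file-dependent; the path is a placeholder.

sondef = shadoz('shadoz_station_profile.dat')   # hypothetical SHADOZ ASCII sounding
for vname, var in sondef.variables.items():     # names/units taken from the file header
    print(vname, var.units, var[:3])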
Example #5
def MorphoConc(concpath):
    conclines = open(concpath).readlines()
    conclines = [
        line for line in conclines
        if line[:2] not in ('//', '**') and line not in ('', '\n')
    ]
    name_line = conclines.pop(0)
    conclabel = re.compile(r'n\[\S+\]')
    conc_names = [
        name.replace('n[', '').replace(']', '')
        for name in conclabel.findall(name_line)
    ]
    name_line = ['N', 'T'] + conc_names
    unit_line = conclines.pop(0).split()
    unit_line = [unit for unit in unit_line]
    concfile = PseudoNetCDFFile()
    concfile.createDimension('TSTEP', len(conclines))
    concfile.createDimension('DATE-TIME', 2)
    concfile.createDimension('VAR', 1)
    unit_dict = dict([(k, v) for k, v in zip(name_line, unit_line)])
    for name in name_line:
        var = concfile.createVariable(name, 'f', ('TSTEP', ))
        var.units = unit_dict[name]
        var.long_name = var.var_desc = name
    for ti, line in enumerate(conclines):
        for var_name, value in zip(name_line, line.split()):
            var = concfile.variables[var_name]
            var[ti] = float(value)

    return concfile
Example #6
def skysonde1sec(inpath):
    datafile = open(inpath, 'r')
    datalines = datafile.read().split('\n')
    nmeta = int(datalines[1].split(' = ')[1])
    meta = dict([[w.strip() for w in line.split(' = ')]
                 for line in datalines[1:nmeta - 2] if line != ''])

    varline, unitline = datalines[nmeta - 2:nmeta]
    varnames = [vn.strip() for vn in spaces.split(varline)]
    units = [u.strip()[1:-1].strip() for u in spaces.split(unitline)]
    print(units)
    data = np.fromstring(', '.join(datalines[nmeta:]),
                         sep=',').reshape(-1, len(varnames))
    outf = PseudoNetCDFFile()
    for pk, pv in meta.items():
        setattr(outf, pk, pv)

    outf.createDimension('time', data.shape[0])
    for varname, unit, vals in zip(varnames, units, data.T):
        var = outf.createVariable(varname.replace(' ', '_'), 'f', ('time', ))
        var.units = unit
        var.standard_name = varname
        var[:] = np.ma.masked_values(vals[:], 99999)

    return outf
Example #7
def MorphoIRRt(irrpath):
    mrglines = open(irrpath).readlines()
    try:
        jday = int(
            datetime.strptime(
                [l for l in mrglines if "Environment Tables for" in l][0].split("Environment Tables for")[-1].strip(),
                "%d-%b-%y",
            ).strftime("%Y%j")
        )
    except Exception:
        warn("Could not find/parse date; using 1900001")
        jday = 1900001

    mrglines = [line for line in mrglines if line[:2] not in ("//", "**") and line not in ("", "\n")]
    name_line = mrglines.pop(0)
    name_line = ["N", "T"] + [
        "IRR_%s" % name.replace("rt[", "").replace("]", "") for name in irrlabel.findall(name_line)
    ]
    unit_line = mrglines.pop(0).split()
    unit_line = [unit for unit in unit_line]
    assert all([eval(v) == 0.0 for v in mrglines.pop(0).split()][2:])
    mrgfile = PseudoNetCDFFile()
    mrgfile.createDimension("TSTEP", len(mrglines))
    mrgfile.createDimension("DATE-TIME", 2)
    mrgfile.createDimension("VAR", 1)
    unit_dict = dict([(k, v) for k, v in zip(name_line, unit_line)])
    tflag = mrgfile.createVariable("TFLAG", "f", ("TSTEP", "VAR", "DATE-TIME"))
    tflag.units = "<JDAY, MIN>"
    tflag.long_name = tflag.var_desc = "TFLAG"
    tflag[:, :, 0] = jday
    for name in name_line:
        var = mrgfile.createVariable(name, "f", ("TSTEP",))
        var.units = unit_dict[name]
        var.long_name = var.var_desc = name
    for ti, line in enumerate(mrglines):
        for var_name, value in zip(name_line, line.split()):
            var = mrgfile.variables[var_name]
            var[ti] = float(value)

    for name in name_line:
        if name in ("T", "N"):
            continue
        var = mrgfile.variables[name]
        var[1:] = (var[1:] - var[:-1]) * 1000.0
    tflag[:, :, 1] = mrgfile.variables["T"][:, None]
    return mrgfile
Example #8
    def testVal2idx(self):
        ncf = PseudoNetCDFFile()
        coorde = np.arange(9, dtype='f')
        coordc = (coorde[:-1] + coorde[1:]) / 2.
        ncf.createDimension('coord', coordc.size)
        ncf.createDimension('nv', 2)
        ncf.createVariable('coord', 'f', ('coord', ), values=coordc)
        bncf = ncf.copy()
        bncf.createVariable('coord_bounds',
                            'f', ('coord', 'nv'),
                            values=coorde.repeat(2, 0)[1:-1].reshape(-1, 2))
        bncf.variables['coord'].bounds = 'coord_bounds'
        cvals = [-1, .25, 4.5, 4.99, 5, 6.75, 10]
        expectedb = np.array([0, 0, 4, 4, 5, 6, 7])
        expectedn = np.array([0, 0, 4, 4, 4, 6, 7])

        def checkvals(ncf, method, clean, compare):
            withbnds = str('coord_bounds' in ncf.variables)
            prefix = withbnds + '&' + method + '&' + clean
            idx = ncf.val2idx('coord',
                              cvals,
                              method=method,
                              clean=clean,
                              bounds='warn')
            warn(prefix + ' got: ' + repr(idx))
            warn(prefix + ' chk: ' + repr(compare))
            assert (np.ma.allclose(compare, idx))

        mw = np.ma.masked_where

        nn_mask = [0] * 7
        bn_mask = [0] * 7
        nm_mask = [1, 1, 0, 0, 0, 0, 1]
        bm_mask = [1, 0, 0, 0, 0, 0, 1]
        em_mask = [1, 1, 0, 1, 1, 1, 1]
        checkvals(ncf, 'nearest', 'none', mw(nn_mask, expectedn))
        checkvals(ncf, 'bounds', 'none', mw(bn_mask, expectedb))
        checkvals(ncf, 'nearest', 'mask', mw(nm_mask, expectedn))
        checkvals(ncf, 'bounds', 'mask', mw(bm_mask, expectedb))

        checkvals(bncf, 'nearest', 'none', mw(nn_mask, expectedn))
        checkvals(bncf, 'bounds', 'none', mw(bn_mask, expectedb))
        checkvals(bncf, 'nearest', 'mask', mw(nm_mask, expectedn))
        checkvals(bncf, 'bounds', 'mask', mw(bm_mask, expectedb))

        checkvals(bncf, 'exact', 'mask', mw(em_mask, expectedb))
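A hedged illustration of the val2idx behavior exercised above (not part of the original test): with cell centers at 0.5..7.5 and no bounds variable, nearest-neighbor lookup without masking returns the closest center index for each query value (0.25 -> 0, 4.99 -> 4, 10 -> 7, matching expectedn above).

import numpy as np
from PseudoNetCDF import PseudoNetCDFFile

f = PseudoNetCDFFile()
f.createDimension('coord', 8)
f.createVariable('coord', 'f', ('coord',), values=np.arange(8, dtype='f') + 0.5)
print(f.val2idx('coord', [0.25, 4.99, 10.], method='nearest', clean='none', bounds='warn'))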
Example #9
def raob(inpath):
    nfields, = np.fromfile(inpath, dtype = '>i4', count = 1) // 4
    fblock = np.fromfile(inpath, dtype = '>i4')
    block = fblock[1:-2]
    block = block.reshape(-1, nfields + 1)[:, :-1]
    names, units = np.char.strip(block[:2].view('S4'))
    datas = (np.ma.masked_values(block[3, :], -2139062144).filled(-999) / 10.**block[2])
    data = (np.ma.masked_values(block[4:, :], -2139062144).filled(-999) / 10.**block[2])
    outf = PseudoNetCDFFile()
    outf.createDimension('level', data.shape[0])
    for k, u, v, vs in zip(names, units, data.T, datas):
        outf.createVariable(k, 'f', ('level',), units = u, values = v)
        outf.createVariable(k + '_FIRST', 'f', ('level',), units = u, values = vs)
        
    #np.savetxt(sys.stdout, (names, units), delimiter = ', ', fmt = '%7s')
    #np.savetxt(sys.stdout, data, delimiter = ', ', fmt = '%7.1f')
    return outf
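A hedged sketch of calling the RAOB reader above (not part of the original example): the path is a placeholder and the variable names come from the 4-character field headers stored in the binary record.

sounding = raob('upperair_station.raob')   # hypothetical big-endian RAOB dump
for name, var in sounding.variables.items():
    print(name, var.units, var[:3])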
Example #10
def raob(inpath):
    nfields, = np.fromfile(inpath, dtype='>i4', count=1) // 4
    fblock = np.fromfile(inpath, dtype='>i4')
    block = fblock[1:-2]
    block = block.reshape(-1, nfields + 1)[:, :-1]
    names, units = np.char.strip(block[:2].view('S4'))
    datas = (np.ma.masked_values(
        block[3, :], -2139062144).filled(-999) / 10.**block[2])
    data = (np.ma.masked_values(
        block[4:, :], -2139062144).filled(-999) / 10.**block[2])
    outf = PseudoNetCDFFile()
    outf.createDimension('level', data.shape[0])
    for k, u, v, vs in zip(names, units, data.T, datas):
        outf.createVariable(k, 'f', ('level',), units=u, values=v)
        outf.createVariable(k + '_FIRST', 'f', ('level',), units=u, values=vs)

    # np.savetxt(sys.stdout, (names, units), delimiter = ', ', fmt = '%7s')
    # np.savetxt(sys.stdout, data, delimiter = ', ', fmt = '%7.1f')
    return outf
Example #11
    def testInterpDimension(self):
        f1 = PseudoNetCDFFile()
        f1.createDimension('time', 2)
        f1.createDimension('layer', 3)
        f1.createDimension('latitude', 4)
        f1.createDimension('longitude', 5)
        lay = f1.createVariable('layer', 'f', ('layer', ))
        lay[:] = np.arange(0, 3)
        simple = f1.createVariable('simple', 'f',
                                   ('time', 'layer', 'latitude', 'longitude'))
        simple[0] = np.arange(3 * 4 * 5).reshape(3, 4, 5)
        simple[1] = np.arange(3 * 4 * 5).reshape(3, 4, 5)

        f2 = f1.applyAlongDimensions(layer=lambda x: (x[1:] + x[:-1]) * .5)
        f3 = f1.interpDimension('layer', f2.variables['layer'])
        self.assertEqual(
            True,
            np.allclose(f2.variables['simple'][:], f3.variables['simple'][:]))
        f4 = PseudoNetCDFFile()
        f4.createDimension('time', 2)
        f4.createDimension('layer', 3)
        f4.createDimension('latitude', 4)
        f4.createDimension('longitude', 5)
        lay = f4.createVariable('layer', 'f',
                                ('time', 'layer', 'latitude', 'longitude'))
        lay[:] = np.arange(0, 3)[None, :, None, None]
        simple = f4.createVariable('simple', 'f',
                                   ('time', 'layer', 'latitude', 'longitude'))
        simple[0] = np.arange(3 * 4 * 5).reshape(3, 4, 5)
        simple[1] = np.arange(3 * 4 * 5).reshape(3, 4, 5)

        f5 = f4.applyAlongDimensions(layer=lambda x: (x[1:] + x[:-1]) * .5)
        lay[1] += .25
        f6 = f4.interpDimension('layer', f5.variables['layer'])
        self.assertEqual(
            True,
            np.allclose(f5.variables['simple'][0], f6.variables['simple'][0]))
        self.assertEqual(
            False,
            np.allclose(f5.variables['simple'][1], f6.variables['simple'][1]))
Example #12
def MorphoConc(concpath):
    conclines = open(concpath).readlines()
    conclines = [line for line in conclines if line[:2] not in ("//", "**") and line not in ("", "\n")]
    name_line = conclines.pop(0)
    name_line = ["N", "T"] + [name.replace("n[", "").replace("]", "") for name in conclabel.findall(name_line)]
    unit_line = conclines.pop(0).split()
    unit_line = [unit for unit in unit_line]
    concfile = PseudoNetCDFFile()
    concfile.createDimension("TSTEP", len(conclines))
    concfile.createDimension("DATE-TIME", 2)
    concfile.createDimension("VAR", 1)
    unit_dict = dict([(k, v) for k, v in zip(name_line, unit_line)])
    for name in name_line:
        var = concfile.createVariable(name, "f", ("TSTEP",))
        var.units = unit_dict[name]
        var.long_name = var.var_desc = name
    for ti, line in enumerate(conclines):
        for var_name, value in zip(name_line, line.split()):
            var = concfile.variables[var_name]
            var[ti] = float(value)

    return concfile
Example #13
def make_out(config, dates, verbose = 0):
    """
    Make an output file with appropriate dimensions and variables

      config - configuration dictionary
      dates  - iterable of dates
    """
    
    from PseudoNetCDF.sci_var import extract
    out = PseudoNetCDFFile()
    
    # Ordered entries are necessary for 
    # consistency with IOAPI
    out.dimensions = OrderedDict()
    out.variables = OrderedDict()

    # Get files for the first date
    get_files = file_getter(config = config, out = None, sources = None, verbose = verbose).get_files
    file_objs = get_files(dates[0])
    metf = [f for f in file_objs if 'PERIM' in f.dimensions][0]
    outnames = OrderedDict()
    for src, name, expr, unit in config['mappings']:
        if not [src, name, expr, unit] == ['SOURCE', 'MECHSPC', 'GEOS_EXPRESSION', 'UNIT']:
            outnames[name] = 0

    d = out.createDimension('TSTEP', len(dates))
    d.setunlimited(True)
    out.createDimension('DATE-TIME', len(metf.dimensions['DATE-TIME']))
    out.createDimension('LAY', len(metf.dimensions['LAY']))
    out.createDimension('VAR', len(outnames))
    mlatb = metf.variables['latitude_bounds']
    mlonb = metf.variables['longitude_bounds']
    if metf.FTYPE == 2:
        out.createDimension('PERIM', len(metf.dimensions['PERIM']))
        out.createDimension('nv', len(metf.dimensions['nv']))
        vardims = ('TSTEP', 'LAY', 'PERIM')
        coordbounddims = ('PERIM', 'nv')
        coorddims = ('PERIM',)
        out.FTYPE = 2 # Boundary http://www.baronams.com/products/ioapi/TUTORIAL.html
    elif metf.FTYPE == 1:
        out.createDimension('ROW', metf.NROWS)
        out.createDimension('COL', metf.NCOLS)
        out.createDimension('nv', len(metf.dimensions['nv']))
        vardims = ('TSTEP', 'LAY', 'ROW', 'COL')
        coordbounddims = ('ROW', 'COL', 'nv')
        coorddims = ('ROW', 'COL')
        out.FTYPE = 1 # Gridded http://www.baronams.com/products/ioapi/TUTORIAL.html
    else:
        raise ValueError('metf.FTYPE must be 1 (gridded) or 2 (boundary); got %s' % metf.FTYPE)
    
    if config['interpolation']['calcgeospress']:
        mpres = metf.variables['PRES']
        pres = out.createVariable('PRES', 'f', vardims)
        for k in mpres.ncattrs():
            setattr(pres, k, getattr(mpres, k))
        pres[:] = mpres[1:]

    out.createVariable('latitude_bounds', 'f', coordbounddims, units = mlatb.units, values = mlatb[:])

    out.createVariable('longitude_bounds', 'f', coordbounddims, units = mlonb.units, values = mlonb[:])

    mlat = metf.variables['latitude']
    out.createVariable('latitude', 'f', coorddims, units = mlat.units, values = mlat[:])

    mlon = metf.variables['longitude']
    out.createVariable('longitude', 'f', coorddims, units = mlon.units, values = mlon[:])
    coordstr = '/'.join(['%s,%s' % (o, a) for o, a in zip(mlon[:].ravel(), mlat[:].ravel())])
    geosfs = [f for f in file_objs if 'tau0' in f.variables.keys()]
    if len(geosfs) > 0:
        geosf = geosfs[0]
        geosf = extract(geosf, [coordstr])
        glatb = geosf.variables['latitude_bounds']
        out.createVariable('geos_latitude_bounds', 'f', coordbounddims, units = glatb.units, values = glatb[:, [0, 0, 1, 1]].reshape(mlatb[:].shape))

        glonb = geosf.variables['longitude_bounds']
        out.createVariable('geos_longitude_bounds', 'f', coordbounddims, units = glonb.units, values = glonb[:, [0, 1, 1, 0]].reshape(mlonb[:].shape))

        glat = geosf.variables['latitude']
        out.createVariable('geos_latitude', 'f', coorddims, units = glat.units, values = glat[:].reshape(mlat[:].shape))

        glon = geosf.variables['longitude']
        out.createVariable('geos_longitude', 'f', coorddims, units = glon.units, values = glon[:].reshape(mlon[:].shape))

    var = out.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME'))
    var.long_name = 'TFLAG'.ljust(16)
    var.var_desc = "Timestep-valid flags:  (1) YYYYDDD or (2) HHMMSS".ljust(80)
    var.units = "<YYYYDDD,HHMMSS>"
    for pk in metf.ncattrs():
        setattr(out, pk, getattr(metf, pk))
    setattr(out, 'VAR-LIST', ''.join([name.ljust(16) for name in outnames]))
    setattr(out, 'NVARS', len(outnames))
    setattr(out, 'SDATE', int(dates[0].strftime('%Y%j')))
    setattr(out, 'STIME', int(dates[0].strftime('%H%M%S')))
    setattr(out, 'EDATE', int(dates[-1].strftime('%Y%j')))
    setattr(out, 'ETIME', int(dates[-1].strftime('%H%M%S')))
    for src, name, expr, outunit in config['mappings']:
        var = out.createVariable(name, 'f', vardims)
        var.long_name = name.ljust(16)
        var.var_desc = name.ljust(80)
        var.units = outunit.ljust(16)
    out.lonlatcoords = coordstr
    return out
Example #14
def cdtoms(path, outfile=None):
    if outfile is None:
        outfile = PseudoNetCDFFile()
    inlines = open(path, 'r').readlines()
    dayline = inlines[0]
    daygrp = _groupdict(dayre, dayline)

    sdate = 0
    if 'daystring' in daygrp:
        date = datetime.strptime(daygrp['daystring'], '%b %d, %Y')
        rdate = datetime(1970, 1, 1)
        sdate = (date - rdate).total_seconds()
    else:
        import pandas as pd
        dayparts = dayline.split(' ')
        for i in [3, 2, 1]:
            daystr = ' '.join(dayparts[:i])
            try:
                date = pd.to_datetime(daystr, box=False)
                break
            except Exception as e:
                print(e)
        else:
            date = np.datetime64('1970-01-01')
        rdate = np.datetime64('1970-01-01')
        sdate = (date - rdate).astype('d') / 1e9

    longrp = _groupdict(edgere, inlines[1])
    latgrp = _groupdict(edgere, inlines[2])

    for propdict in [daygrp, longrp, latgrp]:
        for k, v in propdict.items():
            try:
                v = eval(v)
            except Exception:
                pass
            setattr(outfile, k, v)
    outfile.HISTORY = ''.join(inlines[:3])

    blat = latgrp.get('start', '59.5')
    bsn = latgrp.get('startdir', 'S')
    elat = latgrp.get('end', '59.5')
    esn = latgrp.get('enddir', 'N')
    latstep = float(latgrp.get('step', '1'))
    blat = {'N': 1, 'S': -1}[bsn] * float(blat)
    elat = {'N': 1, 'S': -1}[esn] * float(elat)

    blon = longrp.get('start', '179.375')
    bwe = longrp.get('startdir', 'W')
    elon = longrp.get('end', '179.375')
    ewe = longrp.get('enddir', 'E')
    lonstep = float(longrp.get('step', '1.25'))
    blon = {'E': 1, 'W': -1}[bwe] * float(blon)
    elon = {'E': 1, 'W': -1}[ewe] * float(elon)

    datalines = inlines[3:]
    lats = []
    for i, line in enumerate(datalines):
        if 'lat' not in line:
            datalines[i] = line[1:-1].rstrip()
        else:
            data, lat = line.split('lat =')
            datalines[i] = data[1:-1].rstrip()
            lats.append(lat.strip())

    nlats = len(lats)
    datablock = ''.join(datalines).replace(' ', '0')
    nlons = len(datablock) // 3 // nlats
    outfile.createDimension('time', 1)
    outfile.createDimension('latitude', nlats)
    outfile.createDimension('longitude', nlons)
    outfile.createDimension('nv', 2)

    var = outfile.createVariable('time', 'f', ('time', ))
    var.units = 'seconds since 1970-01-01 00:00:00+0000'
    var[:] = sdate

    var = outfile.createVariable('latitude', 'f', ('latitude', ))
    var.units = 'degrees N'
    var[:] = np.arange(blat, elat + latstep, latstep)
    lat = var
    linelat = np.array(lats, dtype='f')
    if not (lat[:] == linelat).all():
        warn('Header metadata does not match lats')
        lat[:] = linelat

    var = outfile.createVariable('latitude_bounds', 'f', ('latitude', 'nv'))
    var.units = 'degrees N'
    var[:, 0] = lat - latstep / 2.
    var[:, 1] = lat + latstep / 2.

    var = outfile.createVariable('longitude', 'f', ('longitude', ))
    var.units = 'degrees E'
    lon = var[:] = np.arange(blon, elon + lonstep, lonstep)

    var = outfile.createVariable('longitude_bounds', 'f', ('longitude', 'nv'))
    var.units = 'degrees E'
    var[:, 0] = lon - lonstep / 2.
    var[:, 1] = lon + lonstep / 2.

    var = outfile.createVariable('ozone',
                                 'f', ('latitude', 'longitude'),
                                 missing_value=999)
    var.units = 'matm-cm'
    var.long_name = var.var_desc = 'ozone'.ljust(16)
    var[:] = np.ma.masked_values(
        np.array([i for i in datablock], dtype='S1').view('S3').astype('i'),
        var.missing_value).reshape(nlats, nlons)

    return outfile
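A hedged usage sketch for this CF-flavored variant (not from the original source): the reader fills time/latitude/longitude coordinates plus their bounds and masks the 999 flag in ozone; the input path is a placeholder, and an existing file object could also be passed via outfile=.

outf = cdtoms('toms_ascii_grid.txt')       # hypothetical TOMS ASCII grid file
print(outf.variables['time'][:])           # seconds since 1970-01-01
print(outf.variables['ozone'][:].mean())   # masked where the 999 missing flag appears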
Example #15
    def _makencf(self):
        from numpy import arange
        tncf = PseudoNetCDFFile()

        tncf.createDimension('TSTEP', 24)
        tncf.createDimension('LAY', 4)
        tncf.createDimension('ROW', 5)
        tncf.createDimension('COL', 6)
        tncf.createDimension('nv', 4)
        tncf.createDimension('tnv', 2)
        tncf.str_one = '1'
        tncf.int_two = 2
        tncf.float_threeptfive = 3.5
        tncf.Conventions = 'CF-1.6'
        o3 = tncf.createVariable('O3', 'f', ('TSTEP', 'LAY', 'ROW', 'COL'))

        o3[:] = arange(24 * 4 * 5 * 6).reshape(24, 4, 5, 6)
        o3.units = 'ppbv'
        o3.grid_mapping = 'lambert_conformal_conic'
        time = tncf.createVariable('time', 'd', ('TSTEP', ))
        time.long_name = 'time'
        time.units = 'hours since 1970-01-01 00:00:00+0000'
        time[:] = np.arange(24)

        timeb = tncf.createVariable('time_bounds', 'd', ('TSTEP', 'tnv'))
        timeb.long_name = 'time_bounds'
        timeb.units = 'hours since 1970-01-01 00:00:00+0000'
        timeb[:, 0] = np.arange(0, 24)
        timeb[:, 1] = np.arange(1, 25)

        crs = tncf.createVariable('lambert_conformal_conic', 'i', ())
        crs.grid_mapping_name = 'lambert_conformal_conic'
        crs.standard_parallel = np.array([30., 45.])
        crs.longitude_of_central_meridian = -97.
        crs.latitude_of_projection_origin = 40.
        crs.false_northing = 1620000.
        crs.false_easting = 2412000.
        crs.semi_major_axis = 6371000.
        crs.semi_minor_axis = 6371000.
        lat = tncf.createVariable('latitude', 'f', ('ROW', 'COL'))
        lat.long_name = 'latitude'
        lat.units = 'degrees_north'
        lon = tncf.createVariable('longitude', 'f', ('ROW', 'COL'))
        lon.long_name = 'longitude'
        lon.units = 'degrees_east'
        latb = tncf.createVariable('latitude_bounds', 'f',
                                   ('ROW', 'COL', 'nv'))
        latb.long_name = 'latitude_bounds'
        latb.units = 'degrees_north'
        lonb = tncf.createVariable('longitude_bounds', 'f',
                                   ('ROW', 'COL', 'nv'))
        lonb.long_name = 'longitude_bounds'
        lonb.units = 'degrees_east'
        lon[:] = [[
            -120.21161038333193, -120.21160114763147, -120.21159191193058,
            -120.21158267622918, -120.21157344052737, -120.21156420482505
        ],
                  [
                      -120.21161271536134, -120.21160347966001,
                      -120.21159424395826, -120.21158500825604,
                      -120.21157577255335, -120.21156653685021
                  ],
                  [
                      -120.21161504739118, -120.21160581168901,
                      -120.21159657598642, -120.21158734028334,
                      -120.2115781045798, -120.2115688688758
                  ],
                  [
                      -120.21161737942151, -120.21160814371851,
                      -120.21159890801503, -120.21158967231109,
                      -120.21158043660672, -120.21157120090189
                  ],
                  [
                      -120.21161971145229, -120.21161047574842,
                      -120.21160124004409, -120.21159200433934,
                      -120.21158276863409, -120.21157353292838
                  ]]

        lat[:] = [[
            22.748507533242535, 22.748509683865187, 22.74851183448702,
            22.74851398510802, 22.748516135728206, 22.748518286347593
        ],
                  [
                      22.74851605050742, 22.748518201130356,
                      22.748520351752475, 22.74852250237377, 22.74852465299425,
                      22.748526803613903
                  ],
                  [
                      22.74852456777256, 22.748526718395773,
                      22.748528869018187, 22.748531019639763,
                      22.748533170260536, 22.74853532088048
                  ],
                  [
                      22.748533085037966, 22.74853523566145, 22.74853738628417,
                      22.748539536906023, 22.7485416875271, 22.748543838147327
                  ],
                  [
                      22.74854160230359, 22.74854375292739, 22.748545903550376,
                      22.748548054172538, 22.748550204793883, 22.7485523554144
                  ]]
        lonb[:] = [[[
            -120.21161038333193, -120.21160114763147, -120.21160347966001,
            -120.21161271536134
        ],
                    [
                        -120.21160114763147, -120.21159191193058,
                        -120.21159424395826, -120.21160347966001
                    ],
                    [
                        -120.21159191193058, -120.21158267622918,
                        -120.21158500825604, -120.21159424395826
                    ],
                    [
                        -120.21158267622918, -120.21157344052737,
                        -120.21157577255335, -120.21158500825604
                    ],
                    [
                        -120.21157344052737, -120.21156420482505,
                        -120.21156653685021, -120.21157577255335
                    ],
                    [
                        -120.21156420482505, -120.2115549691223,
                        -120.2115573011466, -120.21156653685021
                    ]],
                   [[
                       -120.21161271536134, -120.21160347966001,
                       -120.21160581168901, -120.21161504739118
                   ],
                    [
                        -120.21160347966001, -120.21159424395826,
                        -120.21159657598642, -120.21160581168901
                    ],
                    [
                        -120.21159424395826, -120.21158500825604,
                        -120.21158734028334, -120.21159657598642
                    ],
                    [
                        -120.21158500825604, -120.21157577255335,
                        -120.2115781045798, -120.21158734028334
                    ],
                    [
                        -120.21157577255335, -120.21156653685021,
                        -120.2115688688758, -120.2115781045798
                    ],
                    [
                        -120.21156653685021, -120.2115573011466,
                        -120.21155963317135, -120.2115688688758
                    ]],
                   [[
                       -120.21161504739118, -120.21160581168901,
                       -120.21160814371851, -120.21161737942151
                   ],
                    [
                        -120.21160581168901, -120.21159657598642,
                        -120.21159890801503, -120.21160814371851
                    ],
                    [
                        -120.21159657598642, -120.21158734028334,
                        -120.21158967231109, -120.21159890801503
                    ],
                    [
                        -120.21158734028334, -120.2115781045798,
                        -120.21158043660672, -120.21158967231109
                    ],
                    [
                        -120.2115781045798, -120.2115688688758,
                        -120.21157120090189, -120.21158043660672
                    ],
                    [
                        -120.2115688688758, -120.21155963317135,
                        -120.21156196519657, -120.21157120090189
                    ]],
                   [[
                       -120.21161737942151, -120.21160814371851,
                       -120.21161047574842, -120.21161971145229
                   ],
                    [
                        -120.21160814371851, -120.21159890801503,
                        -120.21160124004409, -120.21161047574842
                    ],
                    [
                        -120.21159890801503, -120.21158967231109,
                        -120.21159200433934, -120.21160124004409
                    ],
                    [
                        -120.21158967231109, -120.21158043660672,
                        -120.21158276863409, -120.21159200433934
                    ],
                    [
                        -120.21158043660672, -120.21157120090189,
                        -120.21157353292838, -120.21158276863409
                    ],
                    [
                        -120.21157120090189, -120.21156196519657,
                        -120.21156429722222, -120.21157353292838
                    ]],
                   [[
                       -120.21161971145229, -120.21161047574842,
                       -120.21161280777879, -120.2116220434835
                   ],
                    [
                        -120.21161047574842, -120.21160124004409,
                        -120.21160357207363, -120.21161280777879
                    ],
                    [
                        -120.21160124004409, -120.21159200433934,
                        -120.21159433636801, -120.21160357207363
                    ],
                    [
                        -120.21159200433934, -120.21158276863409,
                        -120.21158510066192, -120.21159433636801
                    ],
                    [
                        -120.21158276863409, -120.21157353292838,
                        -120.21157586495535, -120.21158510066192
                    ],
                    [
                        -120.21157353292838, -120.21156429722222,
                        -120.21156662924835, -120.21157586495535
                    ]]]
        latb[:] = [[[
            22.748507533242535, 22.748509683865187, 22.748518201130356,
            22.74851605050742
        ],
                    [
                        22.748509683865187, 22.74851183448702,
                        22.748520351752475, 22.748518201130356
                    ],
                    [
                        22.74851183448702, 22.74851398510802,
                        22.74852250237377, 22.748520351752475
                    ],
                    [
                        22.74851398510802, 22.748516135728206,
                        22.74852465299425, 22.74852250237377
                    ],
                    [
                        22.748516135728206, 22.748518286347593,
                        22.748526803613903, 22.74852465299425
                    ],
                    [
                        22.748518286347593, 22.748520436966125,
                        22.748528954232754, 22.748526803613903
                    ]],
                   [[
                       22.74851605050742, 22.748518201130356,
                       22.748526718395773, 22.74852456777256
                   ],
                    [
                        22.748518201130356, 22.748520351752475,
                        22.748528869018187, 22.748526718395773
                    ],
                    [
                        22.748520351752475, 22.74852250237377,
                        22.748531019639763, 22.748528869018187
                    ],
                    [
                        22.74852250237377, 22.74852465299425,
                        22.748533170260536, 22.748531019639763
                    ],
                    [
                        22.74852465299425, 22.748526803613903,
                        22.74853532088048, 22.748533170260536
                    ],
                    [
                        22.748526803613903, 22.748528954232754,
                        22.748537471499613, 22.74853532088048
                    ]],
                   [[
                       22.74852456777256, 22.748526718395773,
                       22.74853523566145, 22.748533085037966
                   ],
                    [
                        22.748526718395773, 22.748528869018187,
                        22.74853738628417, 22.74853523566145
                    ],
                    [
                        22.748528869018187, 22.748531019639763,
                        22.748539536906023, 22.74853738628417
                    ],
                    [
                        22.748531019639763, 22.748533170260536,
                        22.7485416875271, 22.748539536906023
                    ],
                    [
                        22.748533170260536, 22.74853532088048,
                        22.748543838147327, 22.7485416875271
                    ],
                    [
                        22.74853532088048, 22.748537471499613,
                        22.748545988766764, 22.748543838147327
                    ]],
                   [[
                       22.748533085037966, 22.74853523566145,
                       22.74854375292739, 22.74854160230359
                   ],
                    [
                        22.74853523566145, 22.74853738628417,
                        22.748545903550376, 22.74854375292739
                    ],
                    [
                        22.74853738628417, 22.748539536906023,
                        22.748548054172538, 22.748545903550376
                    ],
                    [
                        22.748539536906023, 22.7485416875271,
                        22.748550204793883, 22.748548054172538
                    ],
                    [
                        22.7485416875271, 22.748543838147327, 22.7485523554144,
                        22.748550204793883
                    ],
                    [
                        22.748543838147327, 22.748545988766764,
                        22.748554506034104, 22.7485523554144
                    ]],
                   [[
                       22.74854160230359, 22.74854375292739, 22.74855227019359,
                       22.748550119569494
                   ],
                    [
                        22.74854375292739, 22.748545903550376,
                        22.748554420816852, 22.74855227019359
                    ],
                    [
                        22.748545903550376, 22.748548054172538,
                        22.74855657143929, 22.748554420816852
                    ],
                    [
                        22.748548054172538, 22.748550204793883,
                        22.74855872206093, 22.74855657143929
                    ],
                    [
                        22.748550204793883, 22.7485523554144,
                        22.748560872681754, 22.74855872206093
                    ],
                    [
                        22.7485523554144, 22.748554506034104,
                        22.748563023301763, 22.748560872681754
                    ]]]
        return tncf
Example #16
def wrt_uamiv(fout_path, fin, *, lsurf = False, lapp = False, lemis = False, lcmaq = False, ounits = None):
  """
  Create an UAMIV file using fin class.
  Arguments:
     fout_path - The IOAPI file name including path
     fin       - A class which include all the contents, data arrays, attributes, variable names.
                 The data array shape is (nspc,nsteps,nz,ny,nx).
     lsurf     - If True, output file has only 1 layer regardless of shape of data array.
     lapp      - If True, output file must already exist. Skip writing file dimensions and attributes.
                 It simply appends data arrays into the existing output file.
     lemis     - If True, it is emission file, and variable units have rates instead of concentration
     lcmaq     - If True, the file is for CMAQ, and varnames are checked against CMAQ PM species
                 so that PM unit is assigned, microgram/m**3 instead of moles/m**3.
     ounits    - A list which has units of variables
  """
  # Handling arguments
  lounit = True
  if ounits is None:
    lounit = False
    ounits = []

  # Include modules
  import datetime
  from PseudoNetCDF import PseudoNetCDFFile
  from PseudoNetCDF.pncgen import pncgen
  from PseudoNetCDF import PNC
  from PseudoNetCDF.sci_var import stack_files
  from CAMxtools.write.wrt_ioapi import find_unit
  from pathlib import Path
  import os

  #prepare file attributes
  novars = getattr(fin,'NVARS')
  varnames = getattr(fin,'VAR-LIST').split()
  var0 = varnames[0]
  nsteps = fin.variables[var0].shape[0]
  nz = getattr(fin,'NLAYS')
  ny = getattr(fin,'NROWS')
  nx = getattr(fin,'NCOLS')
  lgrdded = False
  if getattr(fin,'FTYPE') == 1: lgrdded = True
  if lgrdded: # GRIDDED
    assert len(fin.variables[var0].shape) == 4, "len(fin.variables[var0].shape) MUST be 4"
  else: # BOUNDARY CONDITION
    assert len(fin.variables[var0].shape) == 3, "len(fin.variables[var0].shape) MUST be 3"
    ncells = fin.variables[var0].shape[2]
    assert ncells == 2*(nx+ny)+4, "ncells MUST be 2*(nx+ny)+4"

  #open output file
  if not lgrdded: #BOUNDARY
    ftype = 'lateral_boundary'
  else:
    ftype = 'uamiv'
  newf = PseudoNetCDFFile()

  # Set tstep
  tstep = getattr(fin,'TSTEP')

  #copy dimensions
  dimensions_keys = "TSTEP DATE-TIME LAY VAR".split() # These dimension keys must be in the file to be used by m3tools
  if lgrdded: # GRIDDED
    dimensions_keys.append("ROW"); dimensions_keys.append("COL")
  else:
    dimensions_keys.append("PERIM")
  for i in dimensions_keys:
    if i == 'VAR': size = novars
    elif i == 'TSTEP': size = nsteps
    elif i == 'DATE-TIME': size = 2
    elif i == 'LAY':
       if lsurf:
         size = 1
       else:
         size = nz
    elif i == 'VAR': size = novars
    elif i == 'ROW': size = ny
    elif i == 'COL': size = nx
    else : size = ncells # i == 'PERIM'
    newf.createDimension(i,size)

  #copy global attributes
  attribs = "XORIG YORIG XCELL YCELL PLON PLAT TLAT1 TLAT2 IUTM ISTAG CPROJ GDTYP XCENT YCENT P_ALP P_BET P_GAM NLAYS NROWS NCOLS NVARS VAR-LIST NAME NOTE ITZON FTYPE VGTYP VGTOP VGLVLS GDNAM UPNAM FILEDESC SDATE STIME TSTEP Conventions history".split() # These attributes must be in the file for pncgen to uamiv
  cdate = int(datetime.date.today().strftime("%Y%j"))
  ctime = int(datetime.datetime.now().strftime("%H%M%S"))
  for i in attribs:
    try: val = getattr(fin,i)
    except AttributeError: val = ""
    if 'numpy.float32' in str(type(val)):
       val = val.item()
    if i == 'PLON': val = getattr(fin,'XCENT')
    if i == 'PLAT': val = getattr(fin,'YCENT')
    if i == 'TLAT1': val = getattr(fin,'P_ALP')
    if i == 'TLAT2': val = getattr(fin,'P_GAM')
    if i == 'IUTM':
      val = 0
      if getattr(fin,'GDTYP') == 5: val = 1
    if i == 'ISTAG': val = 0
    if i == 'CPROJ':
      if getattr(fin,'GDTYP') == 1: #LATLON
        val = 0
      elif getattr(fin,'GDTYP') == 5: #UTM
        val = 1
      elif getattr(fin,'GDTYP') == 2: #LCP
        val = 2
      elif getattr(fin,'GDTYP') == 6: #PSP
        val = 4
      elif getattr(fin,'GDTYP') == 7: #Equatorial Mercator
        val = 5
      else:
        print("GDTYP = {}".format(GDTYP))
        exit("Not relevant projection")
    if i == 'NLAYS':
       if lsurf:
         val = 1
       else:
         val = nz
    if i == 'NROWS': val = ny
    if i == 'NCOLS': val = nx
    if i == 'NVARS': val = novars
    if i == 'NSTEPS': val = nsteps
    if i == 'NAME':
      name_str = 'AVERAGE'
      if lemis: name_str = 'EMISSIONS'
      if not lgrdded: name_str = 'BOUNDARY'
      val = '{:<10s}'.format(name_str)
    if i == 'NOTE': val = '{:<60s}'.format("wrt_uamiv in CAMxtools")
    if i == 'ITZON': val = 0
    if i == 'VGTYP':
       if lsurf or nz == 1:
         val = -9999
       else:
         val = 2 # VGSGPN3 non-h sigma-p
    if i == 'VGTOP':
       if lsurf or nz == 1:
         val = -9.999E36
       else:
         val = getattr(fin,'VGTOP')
    if i == 'GDNAM' or i == 'UPNAM' or i == 'FILEDESC':
       val = '{:<16s}'.format("CAMx")
    if i == 'Conventions': val = "CF-1.6"
    if i == 'history': val = '{:<250s}'.format("unspecified")
    setattr(newf,i,val)

  #copy variables
  extra = "TFLAG".split()
  for i in extra+varnames:
    if i == 'TFLAG':
      newf.createVariable(i,('int32'),(u'TSTEP', u'VAR', u'DATE-TIME'))
      newf.variables[i].setncattr("units","<YYYYDDD,HHMMSS>")
      newf.variables[i].setncattr("long_name","TFLAG")
      newf.variables[i].setncattr("var_desc",'{:<80s}'.format("Timestep-valid flags:  (1) YYYYDDD or (2) HHMMSS"))
      idate = getattr(fin,'SDATE')
      itime = getattr(fin,'STIME')
      for istep in range(nsteps):
        newf.variables['TFLAG'][istep,:,0] = [idate for ivar in range(len(varnames))]
        newf.variables['TFLAG'][istep,:,1] = [itime for ivar in range(len(varnames))]
        itime += tstep
        if itime == 240000:
          itime = 0
          idate = int((datetime.datetime.strptime(str(idate),"%Y%j") + datetime.timedelta(days=1)).strftime("%Y%j"))
    else:
      if lgrdded: # GRIDDED
        var_type = str(type(fin.variables[var0][0,0,0,0]))
      else:
        var_type = str(type(fin.variables[var0][0,0,0]))
      if 'numpy.float' in var_type:
        dtype = 'float32'
      else:
        dtype = 'int32'
      if lgrdded: # GRIDDED
        newf.createVariable(i, (dtype), ('TSTEP', 'LAY', 'ROW', 'COL'))
      else:
        newf.createVariable(i, (dtype), ('TSTEP', 'LAY', 'PERIM'))
      #Find a relevant unit
      if lounit:
        s = varnames.index(i)
        unit = ounits[s]
      else:
        unit = find_unit(i,lemis=lemis,lcmaq=lcmaq)
      newf.variables[i].setncattr("long_name",'{:<16s}'.format(i))
      newf.variables[i].setncattr("units",'{:<16s}'.format(unit))
      newf.variables[i].setncattr("var_desc",'{:<80s}'.format("".join(["VARIABLE ",i])))
      newf.variables[i]=fin.variables[i]

  # Create UAMIV file
  if lapp: # Save the original fout_path to fout_old_path
    fout_old_path = fout_path + ".old"
    try:
      os.rename(fout_path,fout_old_path) # Move the original output to fout_old_path
      #pncargs = '--format=' + ftype + ',mode="r+"'
      #pncargs = '--format=' + ftype
    except OSError:
      print("Output file is {}".format(fout_path))
      exit("Output file does not exist while lapp is True")
    pncargs = '--format=' + ftype
    oldfile = PNC(pncargs, fout_old_path).ifiles[0]
    fout_tmp_path = fout_path + ".tmp"
    pncgen(newf, fout_tmp_path, format = ftype)
    tmpfile = PNC(pncargs, fout_tmp_path).ifiles[0]
    newf_app = stack_files([oldfile, tmpfile], 'TSTEP')
    pncgen(newf_app, fout_path, format = ftype)
    if Path(fout_old_path).exists(): os.remove(fout_old_path)
    if Path(fout_tmp_path).exists(): os.remove(fout_tmp_path)
  else:
    pncgen(newf, fout_path, format = ftype)

  # close files
  if 'newfile' in locals(): del newfile
  del fin
  if 'newf' in locals(): newf.close()
  print('*** SUCCESS writing UAMIV file')
  return
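A hedged call sketch (the input file, its path, and the pncopen format are assumptions, not part of the original listing): fin can be any PseudoNetCDF-like object carrying the IOAPI attributes named in the docstring (NVARS, VAR-LIST, FTYPE, NLAYS, NROWS, NCOLS, SDATE, STIME, TSTEP, ...).

from PseudoNetCDF import pncopen
fin = pncopen('camx_avrg.nc', format='netcdf')   # hypothetical CAMx average file already in netCDF form
wrt_uamiv('camx_avrg.uamiv', fin, lsurf=False, lemis=False, lcmaq=False)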