Example No. 1
def bom_ascii_to_nc(year,dates,froot):
    """Write <froot>.nc with an observation-time layer for each entry in dates.

    Grid geometry and the missing value come from get_basics(); nb and jh are
    the project's netCDF and JSON helper modules.
    """
    debug = False
    latvec = None
    lonvec = None
    ncobj = None
    latslice = slice(None,None,None)
    lonslice = slice(None,None,None)
    missed_dates = []
    adict = {}

    latvec,lonvec,adict = get_basics()
    miss = adict['missing']

    for dti,dt in enumerate(dates):
        index = (slice(dti,None,None),latslice,lonslice)

        # Initialise the netcdf object
        if ncobj is None:
            if debug: print "Define ncobj:",dt
            dni_var = 'solar_dni'
            ghi_var = 'solar_ghi'
            obs_var = 'obs_time'
            ncobj = nb.nc_open(froot+'.nc','w',format='NETCDF4_CLASSIC')
            nb.nc_set_timelatlon(ncobj,None,len(latvec),len(lonvec))
            nb.nc_set_var(ncobj,dni_var,fill=miss,zlib=True)
            nb.nc_set_var(ncobj,ghi_var,fill=miss,zlib=True)
            nb.nc_set_var(ncobj,obs_var,fill=miss,zlib=True)
            nb.nc_set_var(ncobj,'crs',dims=(),dtype="i4")  # Grid mapping container
            nb.nc_add_data(ncobj,'latitude',latvec)
            nb.nc_add_data(ncobj,'longitude',lonvec)

            dni_nc = ncobj.variables[dni_var]
            ghi_nc = ncobj.variables[ghi_var]
            obs_nc = ncobj.variables[obs_var]

        # Calculate the observation time layer
        obs_data = create_obs_time(latvec,lonvec,dt)
        obs_nc[dti,:,:] = obs_data
         
    # Add time values
    nb.nc_add_time(ncobj,dates)

    # Create an ordered metadata dictionary
    meta = create_meta(dates[0],dates[-1],adict)

    # Output the file-specific metadata to a JSON file regardless of the
    # requested output formats
    jh.json_dump(meta, froot+'.json')
    
    # Setting attributes here is optional in this example because it is
    # known that netcdf_json_wrapper.py will be called next with
    # 'froot+".json"' as one of the input files.
    nb.nc_set_attributes(ncobj,meta)
    if debug: print "Added attributes"
    nb.nc_close(ncobj)
    print "Wrote: "+froot+".nc"
    return froot
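A minimal driver sketch for the example above, assuming dates is a list of datetime.datetime objects (the exact type expected by create_obs_time and nb.nc_add_time is not shown here) and that the output stem is hypothetical:

# Hypothetical driver: one datetime per day of 2000, written to
# solar_obs_2000.nc and solar_obs_2000.json.
from datetime import datetime, timedelta

year = 2000
start = datetime(year, 1, 1)
dates = [start + timedelta(days=i) for i in range(366)]  # 2000 is a leap year

froot = 'solar_obs_%d' % year
bom_ascii_to_nc(year, dates, froot)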
Example No. 3
def lpdaac_to_nc(hdfpath):
    """Convert the LP DAAC MODIS HDF files (M*hdf) in hdfpath into one NetCDF file.

    The output name is built from the product, collection, region and date
    parsed from each HDF file name; nb is the project's netCDF helper module.
    """
    debug = defdebug
    hdfnames = glob(hdfpath + os.sep + 'M*hdf')
    latvec = None
    var2miss = OrderedDict()
    var2type = OrderedDict()
    for hdfname in hdfnames:
        hdfparts = parse_hdfname(hdfname)
        hdfobj = get_hdfobj(hdfname)
        if latvec is None:
            latvec, lonvec = get_hdfdimvecs(hdfobj)
        hdfdict = get_hdfvardict(hdfobj, hdfparts['sds'])
        hdftype = hdfobj.datasets()[hdfparts['sds']][2]
        hdfobj.end()
        ncvar = sds2var(hdfparts['sds'])
        # Need number type as well
        var2type[ncvar] = get_nctype(hdftype)
        var2miss[ncvar] = None
        if '_FillValue' in hdfdict:
            var2miss[ncvar] = hdfdict['_FillValue']

    stem = '{product}.{collection}.{region}.{year}{doy}'.format(**hdfparts)
    ncfile = stem + '.nc'
    jsonfile = stem + '.json'
    if os.path.exists(ncfile):
        os.remove(ncfile)
    ncobj = set_ncobj(ncfile, var2miss, var2type, [hdfparts['datetime']],
                      latvec, lonvec)
    meta = set_ncmeta([hdfparts['datetime']], latvec, lonvec)

    for hdfname in hdfnames:
        t0 = time.clock()
        if debug:
            print hdfname
        hdfparts = parse_hdfname(hdfname)
        hdfobj = get_hdfobj(hdfname)
        hdfvar = get_hdfvar(hdfobj, hdfparts['sds'])
        hdfdict = get_hdfvardict(hdfobj, hdfparts['sds'])
        hdfobj.end()
        ncvar = sds2var(hdfparts['sds'])
        ncobj.variables[ncvar][0, :, :] = hdfvar
        #nb.nc_add_data(ncobj,ncvar,hdfvar,(0,':',':'))
        meta = update_ncmeta(meta, ncvar, hdfdict)
        if deftime:
            print "Time ({}): {}".format(ncvar, time.clock() - t0)

    #jh.json_dump(jsonfile,meta)
    #nb.nc_set_attributes(ncobj,meta)
    nb.nc_close(ncobj)
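A minimal invocation sketch for this example; the directory path is hypothetical and only illustrates that all M*hdf files for one product and date are expected to sit in a single directory:

# Hypothetical directory holding the M*hdf files for one product/date.
hdfpath = '/data/lpdaac/MOD13Q1.006/2000.049'
lpdaac_to_nc(hdfpath)
# Writes <product>.<collection>.<region>.<year><doy>.nc in the current
# directory, with one NetCDF variable per SDS found in the HDF files.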
Example No. 4
def get_basics():
    """Read the lat/lon vectors and basic grid metadata from a reference
    BOM solar NetCDF/JSON pair and return (latvec, lonvec, dict)."""
    ncfile = '/data/remotesensing/MTSAT-BoM/nc2/2000/solar_dni_ghi_20000101.nc'
    jsonfile = re.sub(r'\.nc$','.json',ncfile)
    ncobj = nb.nc_open(ncfile,'r')
    latvec = ncobj.variables['latitude'][:]
    lonvec = ncobj.variables['longitude'][:]
    #meta = nb.nc_get_attributes(ncobj)
    meta = jh.json_load(fname=jsonfile)
    nb.nc_close(ncobj)
    d = {}
    d['ymin'] = float(meta['geospatial_lat_min'])
    d['ymax'] = float(meta['geospatial_lat_max'])
    d['ystep'] = float(meta['geospatial_lat_step'])
    d['yunits'] = meta['geospatial_lat_units']
    d['xmin'] = float(meta['geospatial_lon_min'])
    d['xmax'] = float(meta['geospatial_lon_max'])
    d['xstep'] = float(meta['geospatial_lon_step'])
    d['xunits'] = meta['geospatial_lon_units']
    #d['missing'] = ncobj.variables['solar_dni']._FillValue
    d['missing'] = -999
    return latvec,lonvec,d
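The step values in the returned dictionary are enough to rebuild the coordinate axes, so a quick consistency check is possible. This is only a sketch: numpy is assumed, and the comparison is approximate because the attributes are read back from JSON:

import numpy as np

latvec, lonvec, d = get_basics()

# Rebuild the latitude axis from the metadata and compare it with the
# coordinate variable read from the NetCDF file (sorted, since the file
# may store latitudes in descending order).
lat_rebuilt = np.arange(d['ymin'], d['ymax'] + d['ystep'] / 2.0, d['ystep'])
print(np.allclose(np.sort(latvec), lat_rebuilt))  # expected True for a regular grid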
def bom_ascii_to_nc(year,dates,froot):
    """Convert a year of daily BOM solar ASCII grids (DNI and GHI) into a
    single NetCDF file plus a JSON metadata file."""

    latvec = None
    lonvec = None
    ncobj = None
    latslice = slice(None,None,None)
    lonslice = slice(None,None,None)
    missed_dates = []

    for dti,dt in enumerate(dates):
        dni,ghi = get_solar_files(str(year),dt)
        index = (slice(dti,None,None),latslice,lonslice)

        if dni is not None:
            # Split the input file into metadata and data components
            dni_head,dni_rows,dni_history = split_bom_file(dni)
            # Resample the data
            dni_data,dni_lat,dni_lon,dni_dict = resample_data(dni_rows,dni_head)

        if ghi is not None:
            # Split the input file into metadata and data components
            ghi_head,ghi_rows,ghi_history = split_bom_file(ghi)
            # Resample the data
            ghi_data,ghi_lat,ghi_lon,ghi_dict = resample_data(ghi_rows,ghi_head)

        # Skip initial dates until we get a valid file because we need lat,lon
        if latvec is None and dni is not None:
            latvec = dni_lat
            lonvec = dni_lon
            miss = dni_dict['missing']
        if latvec is None and ghi is not None:
            latvec = ghi_lat
            lonvec = ghi_lon
            miss = ghi_dict['missing']
        if latvec is None:
            missed_dates.append(dt)
            continue

        # Initialise the netcdf object
        if ncobj is None:
            dni_var = 'solar_dni'
            ghi_var = 'solar_ghi'
            obs_var = 'obs_time'
            ncobj = nb.nc_open(froot+'.nc','w')
            #ncobj = nb.nc_open(froot+'.nc','w',format='NETCDF4_CLASSIC')
            nb.nc_set_timelatlon(ncobj,None,len(latvec),len(lonvec))
            nb.nc_set_var(ncobj,dni_var,fill=miss)
            nb.nc_set_var(ncobj,ghi_var,fill=miss)
            nb.nc_set_var(ncobj,obs_var,fill=miss)
            #nb.nc_set_var(ncobj,dni_var,fill=miss,zlib=True)
            #nb.nc_set_var(ncobj,ghi_var,fill=miss,zlib=True)
            #nb.nc_set_var(ncobj,obs_var,fill=miss,zlib=True)
            nb.nc_set_var(ncobj,'crs',dims=(),dtype="i4")  # Grid mapping container
            nb.nc_add_data(ncobj,'latitude',latvec)
            nb.nc_add_data(ncobj,'longitude',lonvec)

            dni_nc = ncobj.variables[dni_var]
            ghi_nc = ncobj.variables[ghi_var]
            obs_nc = ncobj.variables[obs_var]

        # Add observation time layers for any missed dates
        for mi,md in enumerate(missed_dates):
            obs_data = create_obs_time(latvec,lonvec,md)
            #nb.nc_add_data(ncobj,obs_var,obs_data,
            #               index=(slice(mi,None,None),latslice,lonslice))
            obs_nc[mi,:,:] = obs_data
        missed_dates = []

        # Calculate the observation time layer
        obs_data = create_obs_time(latvec,lonvec,dt)
        obs_nc[dti,:,:] = obs_data

        # Add data
        if dni is not None:
            #nb.nc_add_data(ncobj,dni_var,dni_data,index=index)
            dni_nc[dti,:,:] = dni_data
        if ghi is not None:
            #nb.nc_add_data(ncobj,ghi_var,ghi_data,index=index)
            ghi_nc[dti,:,:] = ghi_data
         
    # Add time values
    nb.nc_add_time(ncobj,dates)

    # Create an ordered metadata dictionary
    # (assumes at least one DNI file was found, so dni_dict is defined)
    meta = create_meta(dates[0],dates[-1],dni_dict)

    # Output the file-specific metadata to a JSON file regardless of the
    # requested output formats
    jh.json_dump(meta, froot+'.json')
    
    # Setting attributes here is optional in this example because it is
    # known that netcdf_json_wrapper.py will be called next with
    # 'froot+".json"' as one of the input files.
    nb.nc_set_attributes(ncobj,meta)
    print "Added attributes"
    nb.nc_close(ncobj)
    print "Wrote: "+froot+".nc"
    return froot
def bom_ascii_to_nc(year, dates, froot):
    """Convert a year of daily BOM solar ASCII grids (DNI and GHI) into a
    single NetCDF file plus a JSON metadata file, with optional debug output."""

    debug = False
    latvec = None
    lonvec = None
    ncobj = None
    latslice = slice(None, None, None)
    lonslice = slice(None, None, None)
    missed_dates = []
    adict = {}

    for dti, dt in enumerate(dates):
        dni, ghi = get_solar_files(str(year), dt)
        index = (slice(dti, None, None), latslice, lonslice)

        if dni is not None:
            # Split the input file into metadata and data components
            dni_head, dni_rows, dni_history = split_bom_file(dni)
            # Resample the data
            dni_data, dni_lat, dni_lon, dni_dict = resample_data(
                dni_rows, dni_head)
        else:
            if debug: print "No dni data:", dt

        if ghi is not None:
            # Split the input file into metadata and data components
            ghi_head, ghi_rows, ghi_history = split_bom_file(ghi)
            # Resample the data
            ghi_data, ghi_lat, ghi_lon, ghi_dict = resample_data(
                ghi_rows, ghi_head)
        else:
            if debug: print "No ghi data:", dt

        # Skip initial dates until we get a valid file because we need lat,lon
        if latvec is None and dni is not None:
            if debug: print "Using dni_lat:", dt
            latvec = dni_lat
            lonvec = dni_lon
            adict = dni_dict
            miss = adict['missing']
        if latvec is None and ghi is not None:
            if debug: print "Using ghi_lat:", dt
            latvec = ghi_lat
            lonvec = ghi_lon
            adict = ghi_dict
            miss = adict['missing']
        if latvec is None:
            if debug: print "Save miss:", dt
            missed_dates.append(dt)
            continue

        # Initialise the netcdf object
        if ncobj is None:
            if debug: print "Define ncobj:", dt
            dni_var = 'solar_dni'
            ghi_var = 'solar_ghi'
            obs_var = 'obs_time'
            ncobj = nb.nc_open(froot + '.nc', 'w', format='NETCDF4_CLASSIC')
            nb.nc_set_timelatlon(ncobj, None, len(latvec), len(lonvec))
            nb.nc_set_var(ncobj, dni_var, fill=miss, zlib=True)
            nb.nc_set_var(ncobj, ghi_var, fill=miss, zlib=True)
            nb.nc_set_var(ncobj, obs_var, fill=miss, zlib=True)
            nb.nc_set_var(ncobj, 'crs', dims=(),
                          dtype="i4")  # Grid mapping container
            nb.nc_add_data(ncobj, 'latitude', latvec)
            nb.nc_add_data(ncobj, 'longitude', lonvec)

            dni_nc = ncobj.variables[dni_var]
            ghi_nc = ncobj.variables[ghi_var]
            obs_nc = ncobj.variables[obs_var]

        # Add observation time layers for any missed dates
        for mi, md in enumerate(missed_dates):
            if debug: print "Add missed:", md
            obs_data = create_obs_time(latvec, lonvec, md)
            #nb.nc_add_data(ncobj,obs_var,obs_data,
            #               index=(slice(mi,None,None),latslice,lonslice))
            obs_nc[mi, :, :] = obs_data
        missed_dates = []

        # Calculate the observation time layer
        obs_data = create_obs_time(latvec, lonvec, dt)
        obs_nc[dti, :, :] = obs_data

        # Add data
        if dni is not None:
            #nb.nc_add_data(ncobj,dni_var,dni_data,index=index)
            if debug: print "Add dni:", dni
            dni_nc[dti, :, :] = dni_data
        if ghi is not None:
            if debug: print "Add ghi:", ghi
            #nb.nc_add_data(ncobj,ghi_var,ghi_data,index=index)
            ghi_nc[dti, :, :] = ghi_data

    # Add time values
    nb.nc_add_time(ncobj, dates)

    # Create an ordered metadata dictionary
    meta = create_meta(dates[0], dates[-1], adict)

    # Output the file-specific metadata to a JSON file regardless of the
    # requested output formats
    jh.json_dump(meta, froot + '.json')

    # Setting attributes here is optional in this example because it is
    # known that netcdf_json_wrapper.py will be called next with
    # 'froot+".json"' as one of the input files.
    nb.nc_set_attributes(ncobj, meta)
    if debug: print "Added attributes"
    nb.nc_close(ncobj)
    print "Wrote: " + froot + ".nc"
    return froot
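The comment above notes that setting the attributes here is optional because a JSON wrapper script is run afterwards. A minimal sketch of that follow-up step, written against the standard json module and the netCDF4 library directly rather than the project's nb/jh helpers (the stem name is hypothetical, and this is not the actual netcdf_json_wrapper.py):

import json
import netCDF4

froot = 'solar_dni_ghi_2000'  # hypothetical output stem

# Load the metadata written by jh.json_dump(...) and copy every key/value
# pair onto the NetCDF file as global attributes.
with open(froot + '.json') as f:
    meta = json.load(f)

ncobj = netCDF4.Dataset(froot + '.nc', 'a')
ncobj.setncatts(meta)
ncobj.close()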