Example #1
def bom_ascii_to_nc(year,dates,froot):

    debug = False
    latvec = None
    lonvec = None
    ncobj = None
    latslice = slice(None,None,None)
    lonslice = slice(None,None,None)
    missed_dates = []
    adict = {}

    latvec,lonvec,adict = get_basics()
    miss = adict['missing']

    for dti,dt in enumerate(dates):
        index = (slice(dti,None,None),latslice,lonslice)

        # Initialise the netcdf object
        if ncobj is None:
            if debug: print "Define ncobj:",dt
            dni_var = 'solar_dni'
            ghi_var = 'solar_ghi'
            obs_var = 'obs_time'
            ncobj = nb.nc_open(froot+'.nc','w',format='NETCDF4_CLASSIC')
            nb.nc_set_timelatlon(ncobj,None,len(latvec),len(lonvec))
            nb.nc_set_var(ncobj,dni_var,fill=miss,zlib=True)
            nb.nc_set_var(ncobj,ghi_var,fill=miss,zlib=True)
            nb.nc_set_var(ncobj,obs_var,fill=miss,zlib=True)
            nb.nc_set_var(ncobj,'crs',dims=(),dtype="i4")  # Grid mapping container
            nb.nc_add_data(ncobj,'latitude',latvec)
            nb.nc_add_data(ncobj,'longitude',lonvec)

            dni_nc = ncobj.variables[dni_var]
            ghi_nc = ncobj.variables[ghi_var]
            obs_nc = ncobj.variables[obs_var]

        # Calculate the observation time layer
        obs_data = create_obs_time(latvec,lonvec,dt)
        obs_nc[dti,:,:] = obs_data
         
    # Add time values
    nb.nc_add_time(ncobj,dates)

    # Create an ordered metadata dictionary
    meta = create_meta(dates[0],dates[-1],adict)

    # Output the file-specific metadata to a JSON file regardless of the
    # requested output formats
    jh.json_dump(meta, froot+'.json')
    
    # Setting attributes here is optional in this example because it is
    # known that netcdf_json_wrapper.py will be called next with
    # 'froot+".json"' as one of the input files.
    nb.nc_set_attributes(ncobj,meta)
    if debug: print "Added attributes"
    nb.nc_close(ncobj)
    print "Wrote: "+froot+".nc"
    return froot
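
The abbreviated listing above calls a get_basics() helper that is not shown in this example. A minimal sketch of what such a helper might return, assuming it only supplies the latitude/longitude vectors and an attribute dictionary carrying the 'missing' fill value (the grid spacing, extents and values below are purely illustrative):

import numpy as np

def get_basics():
    # Hypothetical helper: returns the grid vectors and attribute dictionary
    # that the fuller versions below derive from the first valid BoM file.
    # The grid extents and the missing value are illustrative only.
    latvec = np.arange(-10.0, -45.05, -0.05)   # descending latitudes (example)
    lonvec = np.arange(112.0, 154.05, 0.05)    # ascending longitudes (example)
    adict = {'missing': -999.0}                # attribute dict with fill value
    return latvec, lonvec, adict
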
def bom_ascii_to_nc(year,dates,froot):

    latvec = None
    lonvec = None
    ncobj = None
    latslice = slice(None,None,None)
    lonslice = slice(None,None,None)
    missed_dates = []

    for dti,dt in enumerate(dates):
        dni,ghi = get_solar_files(str(year),dt)
        index = (slice(dti,None,None),latslice,lonslice)

        if dni is not None:
            # Split the input file into metadata and data components
            dni_head,dni_rows,dni_history = split_bom_file(dni)
            # Resample the data
            dni_data,dni_lat,dni_lon,dni_dict = resample_data(dni_rows,dni_head)

        if ghi is not None:
            # Split the input file into metadata and data components
            ghi_head,ghi_rows,ghi_history = split_bom_file(ghi)
            # Resample the data
            ghi_data,ghi_lat,ghi_lon,ghi_dict = resample_data(ghi_rows,ghi_head)

        # Skip initial dates until we get a valid file because we need lat,lon
        if latvec is None and dni is not None:
            latvec = dni_lat
            lonvec = dni_lon
            miss = dni_dict['missing']
        if latvec is None and ghi is not None:
            latvec = ghi_lat
            lonvec = ghi_lon
            miss = ghi_dict['missing']
        if latvec is None:
            missed_dates.append(dt)
            continue

        # Initialise the netcdf object
        if ncobj is None:
            dni_var = 'solar_dni'
            ghi_var = 'solar_ghi'
            obs_var = 'obs_time'
            ncobj = nb.nc_open(froot+'.nc','w')
            #ncobj = nb.nc_open(froot+'.nc','w',format='NETCDF4_CLASSIC')
            nb.nc_set_timelatlon(ncobj,None,len(latvec),len(lonvec))
            nb.nc_set_var(ncobj,dni_var,fill=miss)
            nb.nc_set_var(ncobj,ghi_var,fill=miss)
            nb.nc_set_var(ncobj,obs_var,fill=miss)
            #nb.nc_set_var(ncobj,dni_var,fill=miss,zlib=True)
            #nb.nc_set_var(ncobj,ghi_var,fill=miss,zlib=True)
            #nb.nc_set_var(ncobj,obs_var,fill=miss,zlib=True)
            nb.nc_set_var(ncobj,'crs',dims=(),dtype="i4")  # Grid mapping container
            nb.nc_add_data(ncobj,'latitude',latvec)
            nb.nc_add_data(ncobj,'longitude',lonvec)

            dni_nc = ncobj.variables[dni_var]
            ghi_nc = ncobj.variables[ghi_var]
            obs_nc = ncobj.variables[obs_var]

        # Add observation time layers for any missed dates
        for mi,md in enumerate(missed_dates):
            obs_data = create_obs_time(latvec,lonvec,md)
            #nb.nc_add_data(ncobj,obs_var,obs_data,
            #               index=(slice(mi,None,None),latslice,lonslice))
            obs_nc[mi,:,:] = obs_data
        missed_dates = []

        # Calculate the observation time layer
        obs_data = create_obs_time(latvec,lonvec,dt)
        obs_nc[dti,:,:] = obs_data

        # Add data
        if dni is not None:
            #nb.nc_add_data(ncobj,dni_var,dni_data,index=index)
            dni_nc[dti,:,:] = dni_data
        if ghi is not None:
            #nb.nc_add_data(ncobj,ghi_var,ghi_data,index=index)
            ghi_nc[dti,:,:] = ghi_data
         
    # Add time values
    nb.nc_add_time(ncobj,dates)

    # Create an ordered metadata dictionary
    meta = create_meta(dates[0],dates[-1],dni_dict)

    # Output the file-specific metadata to a JSON file regardless of the
    # requested output formats
    jh.json_dump(meta, froot+'.json')
    
    # Setting attributes here is optional in this example because it is
    # known that netcdf_json_wrapper.py will be called next with
    # 'froot+".json"' as one of the input files.
    nb.nc_set_attributes(ncobj,meta)
    print "Added attributes"
    nb.nc_close(ncobj)
    print "Wrote: "+froot+".nc"
    return froot
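
The nb.* helper calls in these listings hide the underlying netCDF API. For orientation, here is a rough, hypothetical equivalent written directly against the netCDF4-python library (assuming that is what the nb module wraps); the function name, time units and variable choices are illustrative and not part of the original example:

from netCDF4 import Dataset, date2num

def write_minimal_nc(path, latvec, lonvec, dates, miss=-999.0):
    # Illustrative stand-in for the nb.* helpers used above.
    ds = Dataset(path, 'w', format='NETCDF4_CLASSIC')     # nb.nc_open
    ds.createDimension('time', None)                      # nb.nc_set_timelatlon
    ds.createDimension('latitude', len(latvec))
    ds.createDimension('longitude', len(lonvec))
    tvar = ds.createVariable('time', 'f8', ('time',))
    tvar.units = 'days since 1899-12-31 00:00:00'
    tvar.calendar = 'gregorian'
    latv = ds.createVariable('latitude', 'f4', ('latitude',))
    lonv = ds.createVariable('longitude', 'f4', ('longitude',))
    latv[:] = latvec                                       # nb.nc_add_data
    lonv[:] = lonvec
    dni = ds.createVariable('solar_dni', 'f4',             # nb.nc_set_var(..., zlib=True)
                            ('time', 'latitude', 'longitude'),
                            fill_value=miss, zlib=True)
    # Data layers are written one time step at a time, e.g. dni[dti,:,:] = dni_data
    tvar[:] = date2num(list(dates), tvar.units, tvar.calendar)   # nb.nc_add_time
    ds.close()                                             # nb.nc_close
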
def bom_ascii_to_nc(year, dates, froot):
    """Convert a year of BoM ASCII solar grids (DNI and GHI) into a single
    netCDF file at froot+'.nc', and write its metadata to froot+'.json'.
    Returns froot."""

    debug = False
    latvec = None
    lonvec = None
    ncobj = None
    latslice = slice(None, None, None)
    lonslice = slice(None, None, None)
    missed_dates = []
    adict = {}

    for dti, dt in enumerate(dates):
        dni, ghi = get_solar_files(str(year), dt)
        index = (slice(dti, None, None), latslice, lonslice)

        if dni is not None:
            # Split the input file into metadata and data components
            dni_head, dni_rows, dni_history = split_bom_file(dni)
            # Resample the data
            dni_data, dni_lat, dni_lon, dni_dict = resample_data(
                dni_rows, dni_head)
        else:
            if debug: print "No dni data:", dt

        if ghi is not None:
            # Split the input file into metadata and data components
            ghi_head, ghi_rows, ghi_history = split_bom_file(ghi)
            # Resample the data
            ghi_data, ghi_lat, ghi_lon, ghi_dict = resample_data(
                ghi_rows, ghi_head)
        else:
            if debug: print "No ghi data:", dt

        # Skip initial dates until we get a valid file because we need lat,lon
        if latvec is None and dni is not None:
            if debug: print "Using dni_lat:", dt
            latvec = dni_lat
            lonvec = dni_lon
            adict = dni_dict
            miss = adict['missing']
        if latvec is None and ghi is not None:
            if debug: print "Using ghi_lat:", dt
            latvec = ghi_lat
            lonvec = ghi_lon
            adict = ghi_dict
            miss = adict['missing']
        if latvec is None:
            if debug: print "Save miss:", dt
            missed_dates.append(dt)
            continue

        # Initialise the netcdf object
        if ncobj is None:
            if debug: print "Define ncobj:", dt
            dni_var = 'solar_dni'
            ghi_var = 'solar_ghi'
            obs_var = 'obs_time'
            ncobj = nb.nc_open(froot + '.nc', 'w', format='NETCDF4_CLASSIC')
            nb.nc_set_timelatlon(ncobj, None, len(latvec), len(lonvec))
            nb.nc_set_var(ncobj, dni_var, fill=miss, zlib=True)
            nb.nc_set_var(ncobj, ghi_var, fill=miss, zlib=True)
            nb.nc_set_var(ncobj, obs_var, fill=miss, zlib=True)
            nb.nc_set_var(ncobj, 'crs', dims=(),
                          dtype="i4")  # Grid mapping container
            nb.nc_add_data(ncobj, 'latitude', latvec)
            nb.nc_add_data(ncobj, 'longitude', lonvec)

            dni_nc = ncobj.variables[dni_var]
            ghi_nc = ncobj.variables[ghi_var]
            obs_nc = ncobj.variables[obs_var]

        # Add observation time layers for any missed dates
        for mi, md in enumerate(missed_dates):
            if debug: print("Add missed:", md)
            obs_data = create_obs_time(latvec, lonvec, md)
            #nb.nc_add_data(ncobj,obs_var,obs_data,
            #               index=(slice(mi,None,None),latslice,lonslice))
            obs_nc[mi, :, :] = obs_data
        missed_dates = []

        # Calculate the observation time layer
        obs_data = create_obs_time(latvec, lonvec, dt)
        obs_nc[dti, :, :] = obs_data

        # Add data
        if dni is not None:
            #nb.nc_add_data(ncobj,dni_var,dni_data,index=index)
            if debug: print "Add dni:", dni
            dni_nc[dti, :, :] = dni_data
        if ghi is not None:
            if debug: print "Add ghi:", ghi
            #nb.nc_add_data(ncobj,ghi_var,ghi_data,index=index)
            ghi_nc[dti, :, :] = ghi_data

    # Add time values
    nb.nc_add_time(ncobj, dates)

    # Create an ordered metadata dictionary
    meta = create_meta(dates[0], dates[-1], adict)

    # Output the file-specific metadata to a JSON file regardless of the
    # requested output formats
    jh.json_dump(meta, froot + '.json')

    # Setting attributes here is optional in this example because it is
    # known that netcdf_json_wrapper.py will be called next with
    # 'froot+".json"' as one of the input files.
    nb.nc_set_attributes(ncobj, meta)
    if debug: print "Added attributes"
    nb.nc_close(ncobj)
    print "Wrote: " + froot + ".nc"
    return froot
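
Finally, a hypothetical driver for the full version above, assuming dates is a list of daily datetime objects for the requested year and froot is an output path without an extension; the function name and paths are examples only:

import datetime

def convert_year(year, outdir):
    # Hypothetical driver: one datetime per day of the year, written to a
    # single netCDF file under outdir. Names and paths are illustrative.
    start = datetime.datetime(year, 1, 1)
    ndays = (datetime.datetime(year + 1, 1, 1) - start).days
    dates = [start + datetime.timedelta(days=d) for d in range(ndays)]
    froot = '%s/bom_solar_%d' % (outdir, year)
    return bom_ascii_to_nc(year, dates, froot)

# e.g. convert_year(2012, '/tmp')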