Example #1
import numpy
import pygeode
from matplotlib import cm, pyplot


def PlotFactors(mode):
    # Diurnal_Factors, Weekly_Factors and DateTime are module-level globals
    # defined elsewhere in the original module.
    Daily = pygeode.open(Diurnal_Factors).diurnal_scale_factors
    Weekly = pygeode.open(Weekly_Factors).weekly_scale_factors
    if mode == 'diurnal':
        Data = Daily[:, :, DateTime]
        smin = 0.8
        smax = 1.2
        TitleTime = DateTime
        tUnit = 'h UTC'
    elif mode == 'weekly':
        Data = Weekly[:, :, DateTime]
        smin = 0.95
        smax = 1.05
        tUnit = ''
        TitleTime = [
            'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday',
            'Sunday'
        ][DateTime]
    else:
        print('Invalid mode. Use "diurnal" or "weekly"')
        return
    x_axis = numpy.arange(-180., 180., 0.25)
    y_axis = numpy.arange(-90., 90., 0.25)
    fig, ax = pyplot.subplots()
    cax = ax.pcolormesh(x_axis,
                        y_axis,
                        Data,
                        cmap=cm.coolwarm,
                        vmin=smin,
                        vmax=smax)
    ax.set_title('TIMES {0} Scale Factors for {1}{2}'.format(
        mode, TitleTime, tUnit))
    ax.set_xlim(-180, 180)
    ax.set_ylim(-90, 90)
    ax.set_xlabel('Longitude')
    ax.set_ylabel('Latitude')
    cbar = fig.colorbar(cax, orientation='horizontal')
    pyplot.savefig('Times{0}.png'.format(mode))
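A minimal usage sketch for the example above. Diurnal_Factors, Weekly_Factors and DateTime are module-level names the snippet does not define, so the values below are assumptions for illustration only:

# Hypothetical configuration; the real paths and indices live elsewhere in the module.
Diurnal_Factors = 'times_diurnal_scale_factors.nc'   # assumed NetCDF file
Weekly_Factors = 'times_weekly_scale_factors.nc'     # assumed NetCDF file

DateTime = 12            # hour of day (UTC) used by the 'diurnal' mode
PlotFactors('diurnal')   # writes Timesdiurnal.png

DateTime = 2             # weekday index (0 = Monday) used by the 'weekly' mode
PlotFactors('weekly')    # writes Timesweekly.png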
Example #2
def WeeklyFactor(time):
    """Reads the TIMES weekly factor for a Time instance.
    
    Args: PST.Time instance
    Returns: TIMES weekly scale factor
    """
    lat = time.lat
    lon = time.lon
    day = time.solar.weekday()
    Weekly = pygeode.open(Weekly_Factors).weekly_scale_factors[:]
    lat_row = int((lat + 90) // 0.25)
    lon_col = int((lon + 180) // 0.25)
    weekly = Weekly[lat_row, lon_col, day]
    return weekly
Example #3
def DailyFactor(time):
    """Reads the daily TIMES scale factor for a Time instance.
    
    Uses lat/lon and solar time stored in time to get the TIMES
    daily factor for the right lat/lon position and right
    time.
    
    Args:
        A PST.Time instance
    
    Returns:
        The TIMES daily scale factor
    """
    lat = time.lat
    lon = time.lon
    hour = time.solar.hour
    Daily = pygeode.open(Diurnal_Factors).diurnal_scale_factors[:]
    lat_row = int((lat + 90) // 0.25)
    lon_col = int((lon + 180) // 0.25)
    diurnal = Daily[lat_row, lon_col, hour]
    return diurnal
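The 0.25-degree grid lookup shared by the two helpers above can be checked on its own. A small sketch with a hypothetical coordinate (the numbers are illustrative, not from the source):

lat, lon = 43.7, -79.4                # hypothetical coordinate, purely illustrative
lat_row = int((lat + 90) // 0.25)     # 534; row 0 corresponds to -90 deg latitude
lon_col = int((lon + 180) // 0.25)    # 402; column 0 corresponds to -180 deg longitude
# Daily[lat_row, lon_col, hour] / Weekly[lat_row, lon_col, day] then pick the
# factor for that grid cell and local solar hour or weekday.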
Example #4
def plotFSTs(season=season, spcs=spcs, spcsFiles=spcsFiles, outputName = outputName, saveDir=saveDir):
    # print minimum outputs
    rmn.fstopt(rmn.FSTOP_MSGLVL,rmn.FSTOPI_MSG_CATAST)

    mInds = []
    for m in season:
        mInds += [monthList.index(m)]

    if not os.path.exists(saveDir):
        nu.makeDir(saveDir)

    for spcInd, nomvar in enumerate(spcs):
        try:
            filename = os.path.join(saveDir, 'output_file_{0}_{1}.fst'.format(outputName, nomvar))
            print('Creating and saving to {}'.format(filename))
            tmp = open(filename, 'w+'); tmp.close()

            output_file = filename
            file_id = rmn.fnom(output_file)
            open_fst = rmn.fstouv(file_id, rmn.FST_RW)
            open_file = spcsFiles[spcInd]
            print "Parameter: " + nomvar
            seaSpcData = get_var(pyg.open(open_file), nomvar, mInds)
            nc_lnsp = pyg.open(lnsp_file)
            pressures = get_pressures(nc_lnsp, mInds)

            timelen, levlen, latlen, lonlen = seaSpcData.shape
            #NOTE: uncomment the following three lines to prep data for basemap use
            #lonShiftSSData = shift_lon(seaSpcData)
            #vertInterpSSData = vert_interp(pressures, lonShiftSSData)
            #meanSSData = np.mean(vertInterpSSData, axis=0)
            #NOTE: uncomment the following four lines to use for fst plotting
            vertInterpSSData = vert_interp(pressures, seaSpcData)
            meanSSData = np.mean(vertInterpSSData, axis=0)  # temp
            for lvl, ray in enumerate(meanSSData):
                meanSSData[lvl] = np.flipud(ray)
            scaleFac = scaleSpcs[allSpcs.index(nomvar)]
            scaledSSData = meanSSData*scaleFac

            #define grid for this file - note that the MACC grid in the file is
            #defined for lons -180 to 180, but the python defGrid_L can't deal
            #with that and defines the grid from 0 to 360 so will have to reorder
            #the MACC fields a bit, or they end up 180 deg out of phase
            # Also necessary to add one more longitude to wrap around
            dlatlon = 360./lonlen   # this is equal to the resolution of the grid

            params0 = {
                    'grtyp' : 'Z',
                    'grref' : 'L',
                    'nj'    : latlen,
                    'ni'    : lonlen,
                    'lat0'  : -90.,
                    'lon0'  : 0.,
                    'dlat'  : dlatlon,
                    'dlon'  : dlatlon
                    }

            MACC_grid= rmn.encodeGrid(params0)
            print("Grids created.")
            print('Grid Shape:' + str(MACC_grid['shape']))

            # copies the default record
            new_record = rmn.FST_RDE_META_DEFAULT.copy()
            tic_record = rmn.FST_RDE_META_DEFAULT.copy()
            tac_record = rmn.FST_RDE_META_DEFAULT.copy()

            try:
                rmn.writeGrid(file_id, MACC_grid)

                tac = rmn.fstinl(file_id, nomvar='>>')[0]
                tic = rmn.fstinl(file_id, nomvar='^^')[0]

                tic_record.update(rmn.fstprm(tic))
                tac_record.update(rmn.fstprm(tac))

                tic_record.update({'datyp' : rmn.FST_DATYP_LIST['float']})
                tac_record.update({'datyp' : rmn.FST_DATYP_LIST['float']})

                rmn.fsteff(tic)
                rmn.fsteff(tac)

                tic_record.update({'d': MACC_grid['ay']})
                tac_record.update({'d': MACC_grid['ax']})
                toc_record = vgd.vgd_new_pres(const_pressure, ip1=MACC_grid['ig1'], ip2=MACC_grid['ig2'])

                rmn.fstecr(file_id, tic_record)  # write the dictionary record to the file as a new record
                rmn.fstecr(file_id, tac_record)  # write the dictionary record to the file as a new record
                vgd.vgd_write(toc_record, file_id)

            except:
                raise

            for rp1 in range(len(const_pressure)):  # writes a record for every level (as a different ip1)
                try:
                    # converts rp1 into a ip1 with pressure kind
                    ip1 = rmn.convertIp(rmn.CONVIP_ENCODE, const_pressure[rp1], rmn.KIND_PRESSURE)
                    new_record.update(MACC_grid)
                    new_record.update({  # Update with specific meta
                        'nomvar': nomvar,
                        'typvar': 'C',
                        'etiket': 'MACCRean',
                        'ni'    : MACC_grid['ni'],
                        'nj'    : MACC_grid['nj'],
                        'ig1'   : tic_record['ip1'],
                        'ig2'   : tic_record['ip2'],
                        'ig3'   : tic_record['ip3'],
                        'ig4'   : tic_record['ig4'],
                        'dateo' : rmn.newdate(rmn.NEWDATE_PRINT2STAMP, 20120101, 0000000),
                        'deet'  : 0,  # Timestep in sec
                        'ip1'   : ip1
                        })

                    #tmp_nparray = np.asfortranarray(monthly_mean[rp1])
                    tmp = scaledSSData[rp1]
                    tmp = np.transpose(tmp)
                    # data array is structured as tmp = monthly_mean[level] where monthly_mean is [level, lat, lon]
                    new_record.update({'d': tmp.astype(np.float32)}) # Updates with data array in the form (lon x lat)

                    print "Defined a new record with dimensions ({0}, {1})".format(new_record['ni'], new_record['nj'])
                    rmn.fstecr(file_id, new_record)  # write the dictionary record to the file as a new record

                except:
                    #rmn.closeall(file_id)
                    rmn.fstfrm(file_id)
                    rmn.fclos(file_id)
                    raise
            rmn.fstfrm(file_id)
            rmn.fclos(file_id)
            print('{} complete~'.format(filename))
        except:
            rmn.fstfrm(file_id)
            rmn.fclos(file_id)
            raise
    print('Finished plotting all FSTs. ')
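The comment block inside the example above notes that the MACC fields are defined on a -180 to 180 longitude grid while defGrid_L runs from 0 to 360, so the fields have to be reordered before writing. The shift_lon helper referenced in the commented-out basemap path is not shown, so the following is only a sketch of one way to do that reorder (it does not add the extra wrap-around longitude the comment also mentions):

import numpy as np

def shift_lon_to_0_360(field):
    # Roll the longitude (last) axis by half its length so data ordered
    # -180..180 lines up with a grid defined from 0 to 360.
    nlon = field.shape[-1]
    return np.roll(field, nlon // 2, axis=-1)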
Example #5
def test_scalar_from_netcdf():
  import pygeode as pyg
  # read from netcdf and access data
  v = pyg.open('test_issue_108.nc').scalar

  assert v[()] == 10.
Example #6
def test_scalar_from_netcdf():
    import pygeode as pyg
    # read from netcdf and access data
    v = pyg.open('test_issue_108.nc').scalar

    assert v[()] == 10.
        # Tail of a month-slicing helper (earlier branches not shown): each branch
        # returns the time steps that fall in month index m_int.
            return open_var[1216:1336]
        elif m_int == 11:
            return open_var[1336:]


##### MAIN #####
month_list = [
    '01JAN', '02FEB', '03MAR', '04APR', '05MAY', '06JUN', '07JLY', '08AUG',
    '09SEP', '10OCT', '11NOV', '12DEC'
]
# this portion of the code handles parsing values from the nc file
for year_int, filename in enumerate(filenames):
    to3_list = []
    so3_list = []
    go3_list = []
    nc = pyg.open(filename)
    lnsp_file = pyg.open(lnsp_files[year_int])
    for m_int, month in enumerate(month_list):
        if m_int < 3 or m_int > 5:
            continue
        date_tuple = (year_int, month)
        strato_file = '/home/ords/aq/alh002/pyscripts/workdir/pv_files/strato_coords_{0}_{1}.txt'.format(
            year_int, month)
        tropo_file = '/home/ords/aq/alh002/pyscripts/workdir/pv_files/tropo_coords_{0}_{1}.txt'.format(
            year_int, month)

        # all instances of '[:4]' are to limit to 4 timesteps, or 1 day (in this case 01012012)
        #        uu = nc.u
        #        vv = nc.v
        #        qq = nc.vo
        #        th = nc.t