Code example #1
File: ioos.py Project: jbosch-noaa/notebooks_demos
def ndbc2df(collector, ndbc_id):
    """
    Ugly hack because `collector.raw(responseFormat="text/csv")`
    Usually times out.

    """
    from netCDF4 import MFDataset, date2index, num2date
    # FIXME: Only sea_water_temperature for now.
    if len(collector.variables) > 1:
        msg = "Expected only 1 variables to download, got {}".format
        raise ValueError(msg(collector.variables))
    if collector.variables[0] == 'sea_water_temperature':
        columns = 'sea_water_temperature (C)'
        ncvar = 'sea_surface_temperature'
        data_type = 'stdmet'
        # adcp, adcp2, cwind, dart, mmbcur, ocean, oceansites, pwind,
        # swden, tao-ctd, wlevel, z-hycom
    else:
        msg = "Do not know how to download {}".format
        raise ValueError(msg(collector.variables))

    uri = 'http://dods.ndbc.noaa.gov/thredds/dodsC/data/{}'.format(data_type)
    url = ('%s/%s/' % (uri, ndbc_id))
    urls = url_lister(url)

    filetype = "*.nc"
    file_list = [filename for filename in fnmatch.filter(urls, filetype)]
    files = [fname.split('/')[-1] for fname in file_list]
    urls = ['%s/%s/%s' % (uri, ndbc_id, fname) for fname in files]

    if not urls:
        raise Exception("Cannot find data at {!r}".format(url))
    nc = MFDataset(urls)

    kw = dict(calendar='gregorian', select='nearest')
    time_dim = nc.variables['time']
    time = num2date(time_dim[:], units=time_dim.units,
                    calendar=kw['calendar'])

    idx_start = date2index(collector.start_time.replace(tzinfo=None),
                           time_dim, **kw)
    idx_stop = date2index(collector.end_time.replace(tzinfo=None),
                          time_dim, **kw)
    if idx_start == idx_stop:
        raise Exception("No data within time range"
                        " {!r} and {!r}".format(collector.start_time,
                                                collector.end_time))
    data = nc.variables[ncvar][idx_start:idx_stop, ...].squeeze()

    time_dim = nc.variables['time']
    time = time[idx_start:idx_stop].squeeze()
    df = pd.DataFrame(data=data, index=time, columns=[columns])
    df.index.name = 'date_time'
    return df
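A minimal call sketch for ndbc2df, assuming a pyoos-style collector (an assumption; the original does not show its construction). The stand-in class, station ID, and dates below are illustrative only, and the snippet additionally relies on module-level imports (fnmatch, pandas as pd) and a url_lister helper not shown here.

from datetime import datetime

class FakeCollector:  # hypothetical stand-in for the real collector object
    variables = ['sea_water_temperature']
    start_time = datetime(2014, 1, 1)
    end_time = datetime(2014, 1, 7)

df = ndbc2df(FakeCollector(), '44013')  # '44013' is an NDBC station ID
print(df.head())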
Code example #2
File: nctools.py Project: liuy0813/HyosPy
def get_tindex(t,start_date,end_date,stride=None):
        
    tindex = []
    tindex.append(date2index(start_date,t,select='before'))
    tindex.append(date2index(end_date,t,select='after') + 1)
    if stride is None:
        tindex.append(1)
    else:
        tindex.append(stride)
    return tindex
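A usage sketch for get_tindex: the three returned values are meant to be used directly as slice bounds and stride. Here t is assumed to be an already-opened netCDF time variable and 'salt' is a made-up variable name.

from datetime import datetime

istart, istop, istride = get_tindex(t, datetime(2013, 1, 1),
                                    datetime(2013, 1, 31), stride=2)
subset = nc.variables['salt'][istart:istop:istride]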
Code example #3
File: surf_vel.py Project: rsignell-usgs/ocean_map
def surf_vel(x,y,url,date_mid=datetime.datetime.utcnow(),
    uvar='u',vvar='v',isurf_layer=0,lonvar='lon',latvar='lat',
    tvar='time',hours_ave=24,lon360=False,ugrid=False,lonlat_sub=1,time_sub=1):
            
    nc=netCDF4.Dataset(url)
    lon = nc.variables[lonvar][:]-360.*lon360
    lat = nc.variables[latvar][:]
    
    if ugrid:
        lon2d=lon
        lat2d=lat
    elif lon.ndim==1:
        # igood and jgood hold the index arrays inside the subset region
        igood = np.where((lon>=x.min()) & (lon<=x.max()))
        jgood = np.where((lat>=y.min()) & (lat<=y.max()))
        bi=np.arange(igood[0].min(),igood[0].max(),lonlat_sub)
        bj=np.arange(jgood[0].min(),jgood[0].max(),lonlat_sub)
        [lon2d,lat2d]=np.meshgrid(lon[bi],lat[bj]) 
    elif lon.ndim==2:
        igood=np.where(((lon>=x.min())&(lon<=x.max())) & ((lat>=y.min())&(lat<=y.max())))
        bj=np.arange(igood[0].min(),igood[0].max(),lonlat_sub)
        bi=np.arange(igood[1].min(),igood[1].max(),lonlat_sub)
        lon2d=nc.variables[lonvar][bj,bi]
        lat2d=nc.variables[latvar][bj,bi]
    else:
        print('uh oh')
        
    #desired_stop_date=datetime.datetime(2011,9,9,17,00)  # specific time (UTC)
    desired_stop_date=date_mid+datetime.timedelta(0,3600.*hours_ave/2.)  
    istop = netCDF4.date2index(desired_stop_date,nc.variables[tvar],select='nearest')   
    actual_stop_date=netCDF4.num2date(nc.variables[tvar][istop],nc.variables[tvar].units)
    actual_date_mid_est=actual_stop_date-datetime.timedelta(0,3600.*hours_ave/2.+3600.*5)
    start_date=actual_stop_date-datetime.timedelta(0,3600.*hours_ave)
    istart = netCDF4.date2index(start_date,nc.variables[tvar],select='nearest')
    print(date_mid.strftime('Requested mid-date: %I:00 %p on %B %d, %Y'))     
    print(actual_date_mid_est.strftime('Returned mid-date (EST): %I:00 %p on %B %d, %Y')) 
    print(start_date.strftime('start: %I:00 %p on %B %d, %Y'))   
    print(actual_stop_date.strftime('stop: %I:00 %p on %B %d, %Y'))
    if ugrid:
        u1=np.mean(nc.variables[uvar][istart:istop:time_sub,isurf_layer,:],axis=0)
        v1=np.mean(nc.variables[vvar][istart:istop:time_sub,isurf_layer,:],axis=0)
    else:
        print('reading u...')
        u1=np.mean(nc.variables[uvar][istart:istop:time_sub,isurf_layer,bj,bi],axis=0)
        print('reading v...')
        v1=np.mean(nc.variables[vvar][istart:istop:time_sub,isurf_layer,bj,bi],axis=0)

    xx2,yy2=np.meshgrid(x,y)
    ui=scipy.interpolate.griddata((lon2d.flatten(),lat2d.flatten()),u1.flatten(),(xx2,yy2),method='linear',fill_value=0.0)
    vi=scipy.interpolate.griddata((lon2d.flatten(),lat2d.flatten()),v1.flatten(),(xx2,yy2),method='linear',fill_value=0.0)
    ui[np.isnan(ui)]=0.0
    vi[np.isnan(vi)]=0.0

    
    return ui,vi,actual_date_mid_est
Code example #4
File: bin_wind.py Project: rsignell-usgs/notebook
def get_nam_ts(url,vname,start=None,stop=None,j=None,i=None):
    nc = netCDF4.Dataset(url)
    ncv = nc.variables
    time_var = ncv['time']
    dtime = netCDF4.num2date(time_var[:],time_var.units)
    istart = netCDF4.date2index(start,time_var,select='nearest')
    istop = netCDF4.date2index(stop,time_var,select='nearest')
    var = ncv[vname]
    v = var[istart:istop,j,i]
    tim = dtime[istart:istop]
    return v,tim
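A hypothetical call to get_nam_ts; the URL, variable name, and grid indices below are placeholders rather than values from the original project.

import datetime

v, tim = get_nam_ts('http://example.com/thredds/dodsC/nam.ncd',  # placeholder URL
                    'wind_speed_height_above_ground',            # assumed variable name
                    start=datetime.datetime(2014, 1, 1),
                    stop=datetime.datetime(2014, 1, 2),
                    j=100, i=200)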
Code example #5
def surf_vel_roms(x,y,url,date_mid=datetime.datetime.utcnow(),hours_ave=24,tvar='ocean_time',lonlat_sub=1,time_sub=6):
    #url = 'http://testbedapps-dev.sura.org/thredds/dodsC/alldata/Shelf_Hypoxia/tamu/roms/tamu_roms.nc'

    #url='http://tds.ve.ismar.cnr.it:8080/thredds/dodsC/field2_test/run1/his'
    #####################################################################################

    nc = netCDF4.Dataset(url)
    mask = nc.variables['mask_rho'][:]
    lon_rho = nc.variables['lon_rho'][:]
    lat_rho = nc.variables['lat_rho'][:]
    anglev = nc.variables['angle'][:]

    desired_stop_date = date_mid+datetime.timedelta(0,3600.*hours_ave/2.)  # specific time (UTC)
    istop = netCDF4.date2index(desired_stop_date,nc.variables[tvar],select='nearest')   
    actual_stop_date=netCDF4.num2date(nc.variables[tvar][istop],nc.variables[tvar].units)   
    start_date=actual_stop_date-datetime.timedelta(0,3600.*hours_ave)
    istart = netCDF4.date2index(start_date,nc.variables[tvar],select='nearest') 

    uvar='u'
    vvar='v'
    isurf_layer = -1
    print('reading u...')
    u=np.mean(nc.variables[uvar][istart:istop:time_sub,isurf_layer,:,:],axis=0)
    print('reading v...')
    v=np.mean(nc.variables[vvar][istart:istop:time_sub,isurf_layer,:,:],axis=0)
    print('done reading data...')
    u = roms_utils.shrink(u, mask[1:-1, 1:-1].shape)
    v = roms_utils.shrink(v, mask[1:-1, 1:-1].shape)

    u, v = roms_utils.rot2d(u, v, anglev[1:-1, 1:-1])


    lon=lon_rho[1:-1,1:-1]
    lat=lat_rho[1:-1,1:-1]

    xx2,yy2=np.meshgrid(x,y)
    print('interpolating u to uniform grid...')
    ui=scipy.interpolate.griddata((lon.flatten(),lat.flatten()),u.flatten(),(xx2,yy2),method='linear',fill_value=0.0)
    print('interpolating v to uniform grid...')
    vi=scipy.interpolate.griddata((lon.flatten(),lat.flatten()),v.flatten(),(xx2,yy2),method='linear',fill_value=0.0)
    ui[np.isnan(ui)]=0.0
    vi[np.isnan(vi)]=0.0

    
    return ui,vi
Code example #6
File: prism2raster.py Project: scw/dap2arc
def mean_precip(nc,bbox=None,start=None,stop=None):
    lon=nc.variables['lon'][:]
    lat=nc.variables['lat'][:]
    tindex0=netCDF4.date2index(start,nc.variables['time'],select='nearest')
    tindex1=netCDF4.date2index(stop,nc.variables['time'],select='nearest')
    bi=(lon>=bbox[0])&(lon<=bbox[2])
    bj=(lat>=bbox[1])&(lat<=bbox[3])
    p=nc.variables['precip_mean'][tindex0:tindex1,bj,bi]
    latmin=np.min(lat[bj])
    p=np.mean(p,axis=0)
    lon=lon[bi]
    lat=lat[bj]
    return p,lon,lat
Code example #7
    def test_nonuniform(self):
        """Check that the fallback mechanism works. """
        nutime = self.TestTime(datetime(1950, 1, 1), 366, 24, "hours since 1900-01-01", "standard")

        # Let's remove the second entry, so that the computed stride is not
        # representative and the bisection method is needed.
        nutime._data = nutime._data[numpy.r_[0, slice(2, 200)]]

        t = date2index(datetime(1950, 2, 1), nutime)
        assert_equal(t, 30)

        t = date2index([datetime(1950, 2, 1), datetime(1950, 2, 3)], nutime)
        assert_equal(t, [30, 32])
Code example #8
    def test_select_dummy(self):
        nutime = self.TestTime(datetime(1950, 1, 1), 366, 24, "hours since 1400-01-01", "standard")

        dates = [datetime(1950, 1, 2, 6), datetime(1950, 1, 3), datetime(1950, 1, 3, 18)]

        t = date2index(dates, nutime, select="before")
        assert_equal(t, [1, 2, 2])

        t = date2index(dates, nutime, select="after")
        assert_equal(t, [2, 2, 3])

        t = date2index(dates, nutime, select="nearest")
        assert_equal(t, [1, 2, 3])
Code example #9
    def test_select_dummy(self):
        nutime = self.TestTime(datetime(1950, 1, 1), 366, 24,
                               'hours since 1400-01-01', 'standard')

        dates = [datetime(1950, 1, 2, 6), datetime(
            1950, 1, 3), datetime(1950, 1, 3, 18)]

        t = date2index(dates, nutime, select='before')
        assert_equal(t, [1, 2, 2])

        t = date2index(dates, nutime, select='after')
        assert_equal(t, [2, 2, 3])

        t = date2index(dates, nutime, select='nearest')
        assert_equal(t, [1, 2, 3])
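The before/after/nearest semantics exercised by the two tests above can be reproduced without the TestTime helper by building an in-memory (diskless) netCDF time variable; the following is a self-contained sketch.

import numpy as np
import netCDF4
from datetime import datetime

ds = netCDF4.Dataset('inmemory.nc', 'w', diskless=True)
ds.createDimension('time', 4)
tvar = ds.createVariable('time', 'f8', ('time',))
tvar.units = 'hours since 1950-01-01'
tvar.calendar = 'standard'
tvar[:] = np.arange(4) * 24.0  # one value per day

query = datetime(1950, 1, 2, 6)
print(netCDF4.date2index(query, tvar, select='before'))   # 1
print(netCDF4.date2index(query, tvar, select='after'))    # 2
print(netCDF4.date2index(query, tvar, select='nearest'))  # 1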
Code example #10
File: virtualOS.py Project: edwinkost/edwin_scratch
def netcdf2PCRobj(ncFile,varName,dateInput):
    # EHS (04 APR 2013): To convert netCDF (tss) file to PCR file.
    # The cloneMap is globally defined (outside this method).
    
    # Get netCDF file and variable name:
    f = nc.Dataset(ncFile)
    varName = str(varName)

    # date
    date = dateInput
    if isinstance(date, str):
        date = datetime.datetime.strptime(date, '%Y-%m-%d')
    date = datetime.datetime(date.year,date.month,date.day)
    
    # time index (in the netCDF file)
    nctime = f.variables['time']  # A netCDF time variable object.
    idx = nc.date2index(date, nctime, calendar=nctime.calendar, \
                                                 select='exact') 
    
    # convert to PCR object and close f
    outPCR = pcr.numpy2pcr(pcr.Scalar,(f.variables[varName][idx].data), \
                             float(f.variables[varName]._FillValue))
    f.close(); f = None ; del f
    # PCRaster object
    return (outPCR)
Code example #11
    def runTest(self):
        # Get the real dates
        # skip this until cftime pull request #55 is in a released
        # version (1.0.1?). Otherwise, fix for issue #808 breaks this
        if parse_version(cftime.__version__) >= parse_version('1.0.1'):
            dates = []
            for file in self.files:
                f = Dataset(file)
                t = f.variables['time']
                dates.extend(num2date(t[:], t.units, t.calendar))
                f.close()

        # Compare with the MF dates
        f = MFDataset(self.files,check=True)
        t = f.variables['time']
        mfdates = num2date(t[:], t.units, t.calendar)

        T = MFTime(t)
        assert_equal(len(T), len(t))
        assert_equal(T.shape, t.shape)
        assert_equal(T.dimensions, t.dimensions)
        assert_equal(T.typecode(), t.typecode())
        # skip this until cftime pull request #55 is in a released
        # version (1.0.1?). Otherwise, fix for issue #808 breaks this
        if parse_version(cftime.__version__) >= parse_version('1.0.1'):
            assert_array_equal(num2date(T[:], T.units, T.calendar), dates)
        assert_equal(date2index(datetime.datetime(1980, 1, 2), T), 366)
        f.close()
Code example #12
File: time_series.py Project: sebhahn/pynetCF
    def get_time_variable_overlap(self, dates):
        """Figure out if a new date array has a overlap with the already existing time
        variable.

        Return the index of the existing time variable where the new dates
        should be located.

        At the moment this only handles cases where all dates are new or none
        are new.

        Parameters
        ----------
        dates: list
            list of datetime objects


        Returns
        -------
        indexes: np.ndarray
           Array of indexes that overlap

        """
        timevar = self.dataset.variables[self.time_var]
        if timevar.size == 0:
            indexes = np.array([0])
        else:
            try:
                indexes = netCDF4.date2index(
                    dates, timevar)
            except ValueError:
                indexes = np.array([timevar.size])

        return indexes
Code example #13
def getFVCOM_bottom_tempsalt_netcdf(lati,loni,starttime,endtime,layer,vname):#vname='temp'or'salinity'
        '''
        Function written by Yacheng Wang.
        Generates model data as a DataFrame according to time and local
        position. Unlike getFVCOM_bottom_temp, which returns depth and temp,
        this function only returns a time-temp DataFrame and uses netCDF4.
        '''
        urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/fvcom/hindcasts/30yr_gom3'
        nc = netCDF4.Dataset(urlfvcom)
        nc.variables
        lat = nc.variables['lat'][:]
        lon = nc.variables['lon'][:]
        times = nc.variables['time']
        jd = netCDF4.num2date(times[:],times.units)
        var = nc.variables[vname]
        inode = nearlonlat(lon,lat,loni,lati)
        modindex=netCDF4.date2index([starttime.replace(tzinfo=None),endtime.replace(tzinfo=None)],times,select='nearest')
        modtso = pd.DataFrame()
        # CHUNK THROUGH 'XDAYS' AT A TIME SINCE IT WAS HANGING UP OTHERWISE
        xdays=100
        for k in range(0,(endtime-starttime).days*24,xdays*24):
          print('Generating a dataframe of model data requested from ' + str(k) + ' to ' + str(k + xdays*24))
          #modtso=pd.DataFrame(var[modindex[0]:modindex[1],layer,inode],index=jd[modindex[0]:modindex[1]])
          modtso1=pd.DataFrame(var[modindex[0]+k:modindex[0]+k+xdays*24,layer,inode],index=jd[modindex[0]+k:modindex[0]+k+xdays*24])
          modtso=pd.concat([modtso,modtso1])
        return modtso
Code example #14
def getFVCOM_bottom_tempsaltvel_netcdf(lati,loni,starttime,endtime,layer,vname):#vname='temp'or'salinity'
        '''
        Function written by Yacheng Wang.
        Generates model data as a DataFrame according to time and local
        position. Unlike getFVCOM_bottom_temp, which returns depth and temp,
        this function only returns a time-temp DataFrame and uses netCDF4.
        '''
        urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/fvcom/hindcasts/30yr_gom3'
        nc = netCDF4.Dataset(urlfvcom)
        nc.variables
        times = nc.variables['time']
        jd = netCDF4.num2date(times[:],times.units)
        if vname=='vel':
            u = nc.variables['u']
            v = nc.variables['v']
            lat = nc.variables['latc'][:]
            lon = nc.variables['lonc'][:]
        else:
            var=nc.variables[vname]
            lat = nc.variables['lat'][:]
            lon = nc.variables['lon'][:]
        inode = nearlonlat(lon,lat,loni,lati)
        modindex=netCDF4.date2index([starttime.replace(tzinfo=None),endtime.replace(tzinfo=None)],times,select='nearest')
        print(modindex)
        #print [u[modindex[0]:modindex[1],layer,inode][0],v[modindex[0]:modindex[1],layer,inode][0]]
        if vname=='vel':
            modtso=pd.DataFrame(np.array([u[modindex[0]:modindex[1],layer,inode],v[modindex[0]:modindex[1],layer,inode]]).T,index=jd[modindex[0]:modindex[1]])
            #modtso=pd.DataFrame(np.array([u[modindex[0],layer,inode],v[modindex[0],layer,inode]]).T,index=jd[modindex[0]])
        else:
            modtso=pd.DataFrame(var[modindex[0]:modindex[1],layer,inode],index=jd[modindex[0]:modindex[1]])
        return modtso
Code example #15
File: pytools.py Project: jbosch-noaa/utilities
def get_roms(url, time_slice, n=3):
    url = parse_url(url)
    with Dataset(url) as nc:
        ncv = nc.variables
        time = ncv['ocean_time']
        tidx = date2index(time_slice, time, select='nearest')
        time = num2date(time[tidx], time.units, time.calendar)

        mask = ncv['mask_rho'][:]
        lon_rho = ncv['lon_rho'][:]
        lat_rho = ncv['lat_rho'][:]
        anglev = ncv['angle'][:]

        u = ncv['u'][tidx, -1, ...]
        v = ncv['v'][tidx, -1, ...]

        u = shrink(u, mask[1:-1, 1:-1].shape)
        v = shrink(v, mask[1:-1, 1:-1].shape)

        u, v = rot2d(u, v, anglev[1:-1, 1:-1])

        lon = lon_rho[1:-1, 1:-1]
        lat = lat_rho[1:-1, 1:-1]

        u, v = u[::n, ::n], v[::n, ::n]
        lon, lat = lon[::n, ::n], lat[::n, ::n]

        u = ma.masked_invalid(u)
        v = ma.masked_invalid(v)
    return dict(lon=lon, lat=lat, u=u, v=v, time=time)
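A hypothetical invocation of get_roms; the OPeNDAP URL is a placeholder, and parse_url, shrink, and rot2d are project helpers assumed to be in scope.

from datetime import datetime

fields = get_roms('http://example.com/thredds/dodsC/roms_his.nc',
                  datetime(2015, 6, 1), n=4)
print(fields['time'], fields['u'].shape)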
Code example #16
 def test_singletime(self):
     # issue 215 test (date2index with time variable length == 1)
     f = Dataset(self.file)
     time2 = f.variables['time2']
     result_index = date2index(self.first_timestamp, time2, select="exact")
     assert_equal(result_index, 0)
     f.close()
Code example #17
def main(O3File, MeteoMask):
#    
#    

    #If a file with data on ozone profiles already contains information on the
    #height of the tropopause, just update it; otherwise, create a new variable.
    FO3 = ncdf.Dataset(O3File,'r+')
    FMeteo = ncdf.MFDataset(MeteoMask)
    
    if not 'HTropo' in FO3.variables:
        var=FO3.createVariable('HTropo','float32',('Time',), zlib=True, complevel=9, fill_value=np.nan)
        var.units = 'm.'
        var.description = 'Height of the tropopause.'
    else:
        var=FO3.variables['HTropo']
    
    TimeO3 = FO3.variables['Time']
    TimeMet = ncdf.MFTime(FMeteo.variables['Time'])
    
    dtTimeO3 = ncdf.num2date(TimeO3, TimeO3.units, TimeO3.calendar)
    idxMet = ncdf.date2index(dtTimeO3, TimeMet, calendar=TimeMet.calendar, select='nearest')
    HTropo = FMeteo.variables['HTropo'][idxMet,0]
    
    var[...] = HTropo
    
    
    
    FO3.close()
    FMeteo.close()
    return 0
Code example #18
File: utilities.py Project: duncombe/system-test
def get_ncfiles_catalog(station_id, jd_start, jd_stop):
    station_name = station_id.split(":")[-1]
    uri = 'http://dods.ndbc.noaa.gov/thredds/dodsC/data/stdmet'
    url = ('%s/%s/' % (uri, station_name))
    urls = _url_lister(url)
    filetype = "*.nc"
    file_list = [filename for filename in fnmatch.filter(urls, filetype)]
    files = [fname.split('/')[-1] for fname in file_list]
    urls = ['%s/%s/%s' % (uri, station_name, fname) for fname in files]

    try:
        nc = MFDataset(urls)

        time_dim = nc.variables['time']
        calendar = 'gregorian'
        idx_start = date2index(jd_start, time_dim, calendar=calendar,
                               select='nearest')
        idx_stop = date2index(jd_stop, time_dim, calendar=calendar,
                              select='nearest')

        dir_dim = np.array(nc.variables['wind_dir'][idx_start:idx_stop, 0, 0], dtype=float)
        speed_dim = np.array(nc.variables['wind_spd'][idx_start:idx_stop, 0, 0])
        # Replace fill values with NaN
        speed_dim[speed_dim == nc.variables['wind_spd']._FillValue] = np.nan

        if dir_dim.ndim != 1:
            dir_dim = dir_dim[:, 0]
            speed_dim = speed_dim[:, 0]
        time_dim = nc.variables['time']
        dates = num2date(time_dim[idx_start:idx_stop],
                         units=time_dim.units,
                         calendar='gregorian').squeeze()
        mask = np.isfinite(speed_dim)

        data = dict()
        data['wind_speed (m/s)'] = speed_dim[mask]
        data['wind_from_direction (degree)'] = dir_dim[mask]
        time = dates[mask]

        # columns = ['wind_speed (m/s)',
        #            'wind_from_direction (degree)']
        df = DataFrame(data=data, index=time)
        return df
    except Exception as e:
        print(str(e))
        df = DataFrame()
        return df
Code example #19
 def test_failure(self):
     nutime = self.TestTime(datetime(1950, 1, 1), 366, 24, "hours since 1900-01-01", "standard")
     try:
         t = date2index(datetime(1949, 2, 1), nutime)
     except ValueError:
         pass
     else:
         raise ValueError("This test should have failed.")
Code example #20
def findNCDTInd(target, nc):
    ds = netCDF4.Dataset(nc)

    time_var = ds.variables['time']
    try:
        ind = netCDF4.date2index(target, time_var)
    except ValueError:
        return None, None
    else:
        return ds, ind
Code example #21
File: netcdf2PCraster.py Project: navass11/SPHY
def netcdf2pcrTimeIdx(
    self, pcr, forcing
):  #ncFile, varName, dateInput, method, factor, x, y, xi, yi, xIdxSta, xIdxEnd, yIdxSta, yIdxEnd):
    #-read netcdf file
    f = nc.Dataset(getattr(self, forcing + 'NC'))
    filecache[getattr(self, forcing + 'NC')] = f

    #-get index from netcdf corresponding with current date
    setattr(self, forcing + 'TimeIdx',
            nc.date2index(self.curdate, f.variables['time'], select='exact'))
Code example #22
 def test_issue444(self):
     # make sure integer overflow not causing error in
     # calculation of nearest index when sum of adjacent
     # time values won't fit in 32 bits.
     ntimes = 20
     f = Dataset(self.file, 'r')
     query_time = datetime(2037, 1, 3, 21, 12)
     index = date2index(query_time, f.variables['time3'], select='nearest')
     assert(index == 11)
     f.close()
Code example #23
 def test_issue444(self):
     # make sure integer overflow not causing error in
     # calculation of nearest index when sum of adjacent
     # time values won't fit in 32 bits.
     ntimes = 20
     f = Dataset(self.file, 'r')
     query_time = datetime(2037, 1, 3, 21, 12)
     index = date2index(query_time, f.variables['time3'], select='nearest')
     assert (index == 11)
     f.close()
Code example #24
def read_ecmwf_macc(filename, latitude, longitude, utc_time_range=None):
    """
    Read data from ECMWF MACC reanalysis netCDF4 file.

    Parameters
    ----------
    filename : string
        full path to netCDF4 data file.
    latitude : float
        latitude in degrees
    longitude : float
        longitude in degrees
    utc_time_range : sequence of datetime.datetime
        pair of start and end naive or UTC date-times

    Returns
    -------
    data : pandas.DataFrame
        dataframe for specified range of UTC date-times
    """
    ecmwf_macc = ECMWF_MACC(filename)
    try:
        ilat, ilon = ecmwf_macc.get_nearest_indices(latitude, longitude)
        nctime = ecmwf_macc.data['time']
        if utc_time_range:
            start_idx = netCDF4.date2index(
                utc_time_range[0], nctime, select='before')
            end_idx = netCDF4.date2index(
                utc_time_range[-1], nctime, select='after')
            time_slice = slice(start_idx, end_idx + 1)
        else:
            time_slice = slice(0, ecmwf_macc.time_size)
        times = netCDF4.num2date(nctime[time_slice], nctime.units)
        df = {k: ecmwf_macc.data[k][time_slice, ilat, ilon]
              for k in ecmwf_macc.keys}
        if ECMWF_MACC.TCWV in df:
            # convert total column water vapor in kg/m^2 at (1-atm, 25-degC) to
            # precipitable water in cm
            df['precipitable_water'] = df[ECMWF_MACC.TCWV] / 10.0
    finally:
        ecmwf_macc.data.close()
    return pd.DataFrame(df, index=times.astype('datetime64[s]'))
Code example #25
File: ecmwf_macc.py Project: anomam/pvlib-python
def read_ecmwf_macc(filename, latitude, longitude, utc_time_range=None):
    """
    Read data from ECMWF MACC reanalysis netCDF4 file.

    Parameters
    ----------
    filename : string
        full path to netCDF4 data file.
    latitude : float
        latitude in degrees
    longitude : float
        longitude in degrees
    utc_time_range : sequence of datetime.datetime
        pair of start and stop naive or UTC date-times

    Returns
    -------
    data : pandas.DataFrame
        dataframe for specified range of UTC date-times
    """
    ecmwf_macc = ECMWF_MACC(filename)
    try:
        ilat, ilon = ecmwf_macc.get_nearest_indices(latitude, longitude)
        nctime = ecmwf_macc.data['time']
        if utc_time_range:
            start_idx = netCDF4.date2index(
                utc_time_range[0], nctime, select='before')
            stop_idx = netCDF4.date2index(
                utc_time_range[-1], nctime, select='after')
            time_slice = slice(start_idx, stop_idx + 1)
        else:
            time_slice = slice(0, ecmwf_macc.time_size)
        times = netCDF4.num2date(nctime[time_slice], nctime.units)
        df = {k: ecmwf_macc.data[k][time_slice, ilat, ilon]
              for k in ecmwf_macc.keys}
        if ECMWF_MACC.TCWV in df:
            # convert total column water vapor in kg/m^2 at (1-atm, 25-degC) to
            # precipitable water in cm
            df['precipitable_water'] = df[ECMWF_MACC.TCWV] / 10.0
    finally:
        ecmwf_macc.data.close()
    return pd.DataFrame(df, index=times.astype('datetime64[s]'))
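A minimal call sketch for read_ecmwf_macc, assuming a MACC netCDF file is available locally; the filename and coordinates are placeholders.

import datetime

data = read_ecmwf_macc('macc_2012.nc', latitude=40.0, longitude=-105.0,
                       utc_time_range=(datetime.datetime(2012, 6, 1),
                                       datetime.datetime(2012, 6, 2)))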
Code example #26
    def test_select_nc(self):
        f = Dataset(self.file, 'r')
        nutime = f.variables['time']

        dates = [datetime(1950, 1, 2, 6), datetime(
            1950, 1, 3), datetime(1950, 1, 3, 18)]

        t = date2index(dates, nutime, select='before')
        assert_equal(t, [1, 2, 2])

        t = date2index(dates, nutime, select='after')
        assert_equal(t, [2, 2, 3])

        t = date2index(dates, nutime, select='nearest')
        assert_equal(t, [1, 2, 3])

        # Test dates outside the support with select
        t = date2index(datetime(1949, 12, 1), nutime, select='nearest')
        assert_equal(t, 0)

        t = date2index(datetime(1978, 1, 1), nutime, select='nearest')
        assert_equal(t, 365)

        # Test dates outside the support with before
        self.assertRaises(
            ValueError, date2index, datetime(1949, 12, 1), nutime, select='before')

        t = date2index(datetime(1978, 1, 1), nutime, select='before')
        assert_equal(t, 365)

        # Test dates outside the support with after
        t = date2index(datetime(1949, 12, 1), nutime, select='after')
        assert_equal(t, 0)

        self.assertRaises(
            ValueError, date2index, datetime(1978, 1, 1), nutime, select='after')
        # test microsecond and millisecond units
        unix_epoch = "milliseconds since 1970-01-01T00:00:00Z"
        d = datetime(2038, 1, 19, 3, 14, 7)
        millisecs = int(
            date2num(d, unix_epoch, calendar='proleptic_gregorian'))
        assert_equal(millisecs, (2 ** 32 / 2 - 1) * 1000)
        unix_epoch = "microseconds since 1970-01-01T00:00:00Z"
        microsecs = int(date2num(d, unix_epoch))
        assert_equal(microsecs, (2 ** 32 / 2 - 1) * 1000000)
        # test microsecond accuracy in date2num/num2date roundtrip
        # note: microsecond accuracy lost for time intervals greater
        # than about 270 years.
        units = 'microseconds since 1776-07-04 00:00:00-12:00'
        dates =\
            [datetime(1962, 10, 27, 6, 1, 30, 9001), datetime(
                1993, 11, 21, 12, 5, 25, 999), datetime(1995, 11, 25, 18, 7, 59, 999999)]
        times2 = date2num(dates, units)
        dates2 = num2date(times2, units)
        for date, date2 in zip(dates, dates2):
            assert_equal(date, date2)
        f.close()
Code example #27
def get_data(start,ilayer):   # get current at layer [0 = surface, -1 = bottom]
    itime = netCDF4.date2index(start,time_var,select='nearest')
    dtime = netCDF4.num2date(time_var[itime],time_var.units)
    daystr = dtime.strftime('%Y-%b-%d %H:%M')
    u = nc['u'][itime, ilayer, :]
    v = nc['v'][itime, ilayer, :]
    t = nc['temp'][itime,ilayer,:]
#    t = 32. + t*9./5.    #convert from C to F
#    u = u*1.94  # convert m/s to knots
#   v = v*1.94  # convert m/s to knots
    return u,v,t,daystr
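get_data reads the globals nc and time_var instead of taking them as arguments; a sketch of the assumed surrounding setup (the URL is a placeholder).

import datetime
import netCDF4

nc = netCDF4.Dataset('http://example.com/thredds/dodsC/fvcom.nc')  # placeholder
time_var = nc.variables['time']

u, v, t, daystr = get_data(datetime.datetime.utcnow(), ilayer=0)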
Code example #28
File: dap2arc.py Project: hetland/dap2arc
def ww2raster(url ='http://motherlode.ucar.edu/thredds/dodsC/fmrc/NCEP/WW3/Regional_US_West_Coast/NCEP-WW3-Regional_US_West_Coast_best.ncd',
    box = [-132.95925,35.442,-117.279,51.12225],
    var = 'Significant_height_of_combined_wind_waves_and_swell'):
    
    '''
    NetCDF4-Python test to read DEM data via OPeNDAP and create Arc Raster
    also tests out writing a small plot using Matplotlib
    Global: http://motherlode.ucar.edu/thredds/dodsC/fmrc/NCEP/WW3/Global/NCEP-WW3-Global_best.ncd
    West Coast: http://motherlode.ucar.edu/thredds/dodsC/fmrc/NCEP/WW3/Regional_US_West_Coast/NCEP-WW3-Regional_US_West_Coast_best.ncd
    '''
    nc = netCDF4.Dataset(url)
    print "Source name: %s" % nc.title
    lon = nc.variables['lon'][:]-360.0
    lat = nc.variables['lat'][:]
    bi = (lon>=box[0]) & (lon<=box[2])
    bj = (lat>=box[1]) & (lat<=box[3])
    
    # find time index to read
    hours_from_now = 0   # Examples: 0=>nowcast, 3 => forecast 3 hours from now, etc. 
    date = datetime.datetime.utcnow()+datetime.timedelta(0,3600*hours_from_now)  
    #date=datetime.datetime(2011,9,9,17,00)  # specific time (UTC)
    
    tindex = netCDF4.date2index(date,nc.variables['time'],select='nearest')
    z = nc.variables[var][tindex,bj,bi]
    lonmin = np.min(lon[bi])
    latmin = np.min(lat[bj])
    dx = np.diff(lon)
    dy = np.diff(lat)
    # check if dx or dy vary by more than one percent
    assert np.abs(np.ptp(dx)/np.mean(dx))<=0.01,'longitude spacing is not uniform'
    assert np.abs(np.ptp(dy)/np.mean(dy))<=0.01,'latitude spacing is not uniform'
    if dy[0]>0:  # lat increasing
        z=np.array(z[::-1,:])
    if dx[0]<0:  # lon decreasing
        z=np.array(z[:,::-1])    
    dx=np.abs(np.mean(dx))
    dy=np.abs(np.mean(dy))
    xyOrig = arcpy.Point(float(lonmin),float(latmin))

    # create Arc Raster
    arcpy.workspace  = "C:\\workspace"
    arcpy.env.overwriteOutput = True
    rasterName = "sig_ht"
    outRaster = os.path.normpath(os.path.join(arcpy.workspace,rasterName))
    print(outRaster)
    grid1=arcpy.NumPyArrayToRaster(z,xyOrig,dx,dy)
    grid1.save(os.path.join(arcpy.workspace,outRaster))
    strPrj = "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID"\
             "['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],"\
             "UNIT['Degree',0.0174532925199433]]"
    arcpy.DefineProjection_management(outRaster,strPrj)
    print "Written: %s" % grid1
    arcpy.AddMessage("Written: %s" % grid1)
    nc.close()
Code example #29
def nc_doy(doy, times):
    """ Finds day of year for netcdf time dimension
    :param doy: day of year netcdf variable
    :param times: half hourly times as netcdf variable
    :return:
    """
    for t in times[:]:
        day_time = nC.num2date(t, times.units)
        idx = nC.date2index(day_time, times)
        tt = day_time.timetuple()
        doy[idx] = tt.tm_yday
    return 'yay'
Code example #30
    def get_timestep_netcdf(self, tstep):
        """
        Pull out a time step from the forcing files and
        place that time step into a dict

        Args:
            force:   input array of forcing variables
            tstep:   datetime time step

        Returns:
            inpt:    dictionary of forcing variable images
        """

        inpt = {}

        for f in self.force.keys():

            if isinstance(self.force[f], np.ndarray):
                # If it's a constant value then just read in the numpy array
                # pull out the value
                # ensures not a reference (especially if T_g)
                inpt[self.MAP_INPUTS[f]] = self.force[f].copy()

            else:
                # determine the index in the netCDF file

                # compare the dimensions and variables to get the variable name
                v = list(
                    set(self.force[f].variables.keys()) -
                    set(self.force[f].dimensions.keys()))
                v = [fv for fv in v if fv != 'projection'][0]

                # make sure you're in the same timezone
                if hasattr(self.force[f].variables['time'], 'time_zone'):
                    tstep_zone = tstep.astimezone(
                        pytz.timezone(
                            self.force[f].variables['time'].time_zone))
                    tstep_zone = tstep_zone.tz_localize(None)
                else:
                    tstep_zone = tstep.tz_localize(None)

                # find the index based on the time step
                t = nc.date2index(
                    tstep_zone,
                    self.force[f].variables['time'],
                    calendar=self.force[f].variables['time'].calendar,
                    select='exact')

                # pull out the value
                inpt[self.MAP_INPUTS[f]] = \
                    self.force[f].variables[v][t, :].astype(np.float64)

        return inpt
Code example #31
 def interp_data(cls, lat, lon, utc_time, data, key):
     """
     Interpolate data using nearest neighbor.
     """
     nctime = data['time']  # time
     ilat, ilon = cls.get_nearest_indices(lat, lon)
     # time index before
     before = netCDF4.date2index(utc_time, nctime, select='before')
     fbefore = data[key][before, ilat, ilon]
     fafter = data[key][before + 1, ilat, ilon]
     dt_num = netCDF4.date2num(utc_time, nctime.units)
     time_ratio = (dt_num - nctime[before]) / cls.dtime
     return fbefore + (fafter - fbefore) * time_ratio
Code example #32
 def interp_data(cls, lat, lon, utc_time, data, key):
     """
     Interpolate data using nearest neighbor.
     """
     nctime = data['time']  # time
     ilat, ilon = cls.get_nearest_indices(lat, lon)
     # time index before
     before = netCDF4.date2index(utc_time, nctime, select='before')
     fbefore = data[key][before, ilat, ilon]
     fafter = data[key][before + 1, ilat, ilon]
     dt_num = netCDF4.date2num(utc_time, nctime.units)
     time_ratio = (dt_num - nctime[before]) / cls.dtime
     return fbefore + (fafter - fbefore) * time_ratio
Code example #33
def nc_mean_daily_temp(half_hourly_temps, daily_mean_temps, times, time_lst):
    """ Finds mean daily temperatures from half hourly temperature data
    :param half_hourly_temps: half hourly temperatures as netcdf variable
    :param daily_mean_temps: empty daily mean temperature netcdf variable
    :param times: half hourly times as netcdf variable
    :param time_lst: list of daily datetime objects
    :return:
    """
    for t in enumerate(time_lst):
        idx = nC.date2index(t[1], times)
        mean_daily_temp = np.mean(half_hourly_temps[idx:idx + 48, 0, 0])
        daily_mean_temps[t[0], 0, 0] = mean_daily_temp
    return 'yay'
Code example #34
 def find_date_idx(date, data):
     """ Finds index in netcdf file for given date
     :param date: date in format specified in DalecData class
     :return: date index
     """
     if type(date) == int:
         d_time = dt.datetime(date, 1, 1)
     elif type(date) == tuple:
         d_time = dt.datetime(date[0], date[1], date[2])
     else:
         raise ValueError('Date wrong format, please check input')
     times = data.variables['time']
     return nC.date2index(d_time, times, select='nearest')
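find_date_idx accepts either a bare year or a (year, month, day) tuple; a usage sketch with an assumed open Dataset named data.

idx = find_date_idx(2012, data)           # resolves to 1 January 2012
idx = find_date_idx((2012, 6, 15), data)  # a specific day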
Code example #35
def nc_day_len(is_day, day_len, times, time_lst):
    """ Finds total daily global radiation from half hourly global radiation data
    :param is_day: half hourly 'is day' netcdf variable with values of 1 day or 0 night
    :param day_len: empty day length netcdf variable (hours)
    :param times: half hourly times as netcdf variable
    :param time_lst: list of daily datetime objects
    :return:
    """
    for t in enumerate(time_lst):
        idx = nC.date2index(t[1], times)
        where_day = np.where(is_day[idx:idx + 48, 0, 0] == 1)[0]
        day_len[t[0], 0, 0] = len(where_day) * 0.5
    return 'yay'
Code example #36
def netcdf2NumpyDailyTimeSlice(
        ncFile,
        varName,
        startDate,  #endDate,
        useDoy=None,
        cloneMapFileName=None,
        LatitudeLongitude=True,
        specificFillValue=None):

    logger.debug('reading variable: ' + str(varName) + ' from the file: ' +
                 str(ncFile))
    f = read_netCDF(ncFile)
    varName = str(varName)
    f = rename_latlong_dims(f, LatitudeLongitude)
    t_varname = get_time_variable_name(f)
    t_dimname = get_time_dimension_name(f)
    t_unit = get_time_units(f.variables[t_varname])
    t_calendar = get_time_calendar(f.variables[t_varname])
    startDate = format_date(startDate, f.variables[t_varname], useDoy)
    endDate = startDate + datetime.timedelta(days=1)
    lastDateInNC = nc.num2date(f.variables[t_varname][-1],
                               units=t_unit,
                               calendar=t_calendar)
    startIndex = nc.date2index(
        datetime.datetime(startDate.year, startDate.month, startDate.day, 0, 0,
                          0), f.variables[t_varname])
    if endDate <= lastDateInNC:
        endIndex = nc.date2index(
            datetime.datetime(endDate.year, endDate.month, endDate.day, 0, 0,
                              0), f.variables[t_varname])
        # print endIndex
    else:
        endIndex = f.variables[t_varname].size
    timeIndex = np.arange(startIndex, endIndex)
    # print timeIndex
    arr = resample_nc_data(f, varName, cloneMapFileName, t_dimname, timeIndex)
    f = None
    return arr
Code example #37
File: geospatial.py Project: AHeadman-USGS/WaterPY
def build_temps(f, x, y):
    """
    This also needs a docstring.
    """

    # Read in and build the netCDF4 parameters
    nc = netCDF4.Dataset(f)
    lat = nc.variables['y'][:]
    lon = nc.variables['x'][:]
    time_var = nc.variables['time']
    dtime = netCDF4.num2date(time_var[:], time_var.units)

    # Building the indexes points.
    # By default this starts when Daymet starts, though this could be flexible.
    # Currently, this only accepts Daymet data.
    start = dt.datetime(1980, 1, 1)
    end = dt.datetime.utcnow()
    istart = netCDF4.date2index(start, time_var, select='nearest')
    istop = netCDF4.date2index(end, time_var, select='nearest')
    lati = y
    loni = x
    ix = near(lon, loni)
    iy = near(lat, lati)

    # Selecting/subsetting the NetCDF dataset.
    temps = nc.variables['tmax'][:]
    hs = temps[istart:istop, ix, iy]
    tim = dtime[istart:istop]

    # Arranging data into pandas df.
    temps_ts = pd.Series(hs, index=tim, name='temperature (celsius)')
    temps_ts = pd.DataFrame(temps_ts)
    temps_ts.reset_index(inplace=True)
    temps_ts.columns = ['Index', 'temperature (celsius)']
    temps_ts['date'] = temps_ts['Index']
    temps_ts.set_index('Index', drop=True, inplace=True)

    return temps_ts
Code example #38
File: geospatial.py Project: AHeadman-USGS/WaterPY
def build_prcp(f, x, y):
    """
    This needs a docstring!
    """

    # Read in and build the netCDF4 parameters
    nc = netCDF4.Dataset(f)
    lat = nc.variables['y'][:]
    lon = nc.variables['x'][:]
    time_var = nc.variables['time']
    dtime = netCDF4.num2date(time_var[:], time_var.units)

    # Building the indexes points.
    # By default this starts when Daymet starts, though this could be flexible.
    # Currently, this only accepts Daymet data.
    start = dt.datetime(1980, 1, 1)
    end = dt.datetime.utcnow()
    istart = netCDF4.date2index(start, time_var, select='nearest')
    istop = netCDF4.date2index(end, time_var, select='nearest')
    lati = y
    loni = x
    ix = near(lon, loni)
    iy = near(lat, lati)

    # Selecting the variables.
    prcp = nc.variables['prcp'][:]
    hs = prcp[istart:istop, ix, iy]
    tim = dtime[istart:istop]

    # Arranging data into pandas df.
    prcp_ts = pd.Series(hs, index=tim, name='precipitation (mm/day)')
    prcp_ts = pd.DataFrame(prcp_ts)
    prcp_ts.reset_index(inplace=True)
    prcp_ts.columns = ['Index', 'precipitation (mm/day)']
    prcp_ts['date'] = prcp_ts['Index']
    prcp_ts.set_index('Index', drop=True, inplace=True)

    return prcp_ts
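Both Daymet builders above take a netCDF path plus projected x/y coordinates and return a pandas DataFrame; a hypothetical call (path and coordinates are made up).

prcp_ts = build_prcp('daymet_prcp_2018.nc', x=512500.0, y=1250500.0)
temps_ts = build_temps('daymet_tmax_2018.nc', x=512500.0, y=1250500.0)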
Code example #39
def getMeteoValue(meteoVar, time, latitude, longitude):
    global year
    global root_grp
    meteoVariables = []
    currentYear = pd.Timestamp(time).year
    #currentYear = pd.to_datetime(time).dt.year
    #print(currentYear)
    if (currentYear != year):
        year = currentYear
        print('data/d3_' + str(year) + '.nc')
        root_grp = nc4.Dataset('data/d3_' + str(year) + '.nc')
    meteoVariable = root_grp.variables[meteoVar]

    #print meteoVariable.dimensions
    latVar = root_grp.variables['latitude']
    lonVar = root_grp.variables['longitude']
    totalLats = len(latVar)
    totalLons = len(lonVar)
    maxLat = max(latVar)
    maxLon = max(lonVar)
    minLat = min(latVar)
    minLon = min(lonVar)
    # grid spacing from the first two coordinates
    cellSizeLat = latVar[1] - latVar[0]
    cellSizeLon = lonVar[1] - lonVar[0]
    cellTileLat = math.ceil((-minLat + latitude) / cellSizeLat)
    cellTileLon = math.ceil((-minLon + longitude) / cellSizeLon)

    time_var = root_grp.variables['time']

    #dtime = nc4.num2date(time_var[:], time_var.units)

    #prova = time_var.sel(time = time)

    test = nc4.date2index(pd.Timestamp(time), time_var, select="nearest")

    #print("hello")

    #print precipitation[test,cellTileLat,cellTileLon]

    return meteoVariable[test, cellTileLat, cellTileLon]


#print(len(temp))
#print(temp[0])

#hs = pd.Series(root_grp.variables['area'].dimensions['lat'])

#getMeteoValue(datetime.datetime.strptime("2015-01-04 10:00", "%Y-%m-%d %H:%M"),52,6)
Code example #40
def get_ncfiles_catalog(station_id, jd_start, jd_stop):
    station_name = station_id.split(":")[-1]
    uri = 'http://dods.ndbc.noaa.gov/thredds/dodsC/data/adcp'
    url = ('%s/%s/' % (uri, station_name))
    urls = _url_lister(url)
    filetype = "*.nc"
    file_list = [filename for filename in fnmatch.filter(urls, filetype)]
    files = [fname.split('/')[-1] for fname in file_list]
    urls = ['%s/%s/%s' % (uri, station_name, fname) for fname in files]

    nc = MFDataset(urls)

    time_dim = nc.variables['time']
    calendar = 'gregorian'
    idx_start = date2index(jd_start, time_dim, calendar=calendar,
                           select='nearest')
    idx_stop = date2index(jd_stop, time_dim, calendar=calendar,
                          select='nearest')

    dir_dim = nc.variables['water_dir'][idx_start:idx_stop, ...].squeeze()
    speed_dim = nc.variables['water_spd'][idx_start:idx_stop, ...].squeeze()
    if dir_dim.ndim != 1:
        dir_dim = dir_dim[:, 0]
        speed_dim = speed_dim[:, 0]
    time_dim = nc.variables['time']
    dates = num2date(time_dim[idx_start:idx_stop],
                     units=time_dim.units,
                     calendar='gregorian').squeeze()
    data = dict()
    data['sea_water_speed (cm/s)'] = speed_dim
    col = 'direction_of_sea_water_velocity (degree)'
    data[col] = dir_dim
    time = dates
    columns = ['sea_water_speed (cm/s)',
               'direction_of_sea_water_velocity (degree)']
    df = DataFrame(data=data, index=time, columns=columns)
    return df
Code example #41
def get_FVCOM_temp(
        latp,
        lonp,
        dtime,
        depth='bottom',
        mindistance=2,
        fortype='temperature'):  # gets modeled temp using GOM3 forecast
    '''
    fortype list ['tempdepth','temperature']
    the unit of mindistance is miles
    Taken primarily from Rich's blog at: http://rsignell-usgs.github.io/blog/blog/2014/01/08/fvcom/ on July 30, 2018
    where latp and lonp are the position of interest, dtime is the datetime, and depth is 'bottom' for the bottom layer
    '''
    m2k_factor = 0.62137119  #mile to kilometers parameter
    #urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/fvcom/hindcasts/30yr_gom3'
    #urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc'
    #    urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_GOM3_FORECAST.nc'
    urlfvcom = get_FVCOM_url(dtime)
    # equality comparison with np.nan is always False; test identity instead
    if urlfvcom is np.nan:
        if fortype == 'temperature':
            return np.nan
        elif fortype == 'tempdepth':
            return np.nan, np.nan
        else:
            return 'please input right fortype'
    nc = netCDF4.Dataset(urlfvcom).variables
    #first find the index of the grid
    lat = nc['lat'][:]
    lon = nc['lon'][:]
    inode, dist = nearlonlat(lon, lat, lonp, latp)
    if dist > mindistance / m2k_factor / 111:
        return np.nan, np.nan
    #second find the index of time
    time_var = nc['time']
    itime = netCDF4.date2index(dtime, time_var,
                               select='nearest')  # where startime in datetime
    if depth == 'bottom':
        layer = -1
    else:
        depth_distance = abs(nc['siglay'][:, inode] * nc['h'][inode] + depth)
        layer = np.argmin(depth_distance)

    if fortype == 'temperature':
        return nc['temp'][itime, layer, inode]
    elif fortype == 'tempdepth':
        return nc['temp'][itime, layer, inode], nc['h'][inode]
    else:
        return 'please input right fortype'
Code example #42
File: netcdf2PCraster.py Project: navass11/SPHY
def netcdf2pcrDynamic(
    self, pcr, forcing
):  #ncFile, varName, dateInput, method, factor, x, y, xi, yi, xIdxSta, xIdxEnd, yIdxSta, yIdxEnd):
    #-read netcdf file
    f = nc.Dataset(getattr(self, forcing + 'NC'))
    filecache[getattr(self, forcing + 'NC')] = f

    #-get index from netcdf corresponding with current date
    idx = int(nc.date2index(self.curdate, f.variables['time'], select='exact'))

    #-get raw netcdf gridded data from netcdf, transform to array and multiply with factor
    if getattr(self, forcing + 'InProj') == "rotated":
        z = f.variables[getattr(
            self, forcing +
            'VarName')][idx,
                        getattr(self, forcing +
                                'xyUL'):(getattr(self, forcing + 'xyLL') + 1),
                        getattr(self, forcing +
                                'xyUR'):(getattr(self, forcing + 'xyLR') + 1)]
    else:
        z = f.variables[getattr(
            self,
            forcing + 'VarName')][idx,
                                  getattr(self, forcing + 'yIdxSta'):(
                                      getattr(self, forcing + 'yIdxEnd') + 1),
                                  getattr(self, forcing + 'xIdxSta'):(
                                      getattr(self, forcing + 'xIdxEnd') + 1)]
    z = np.asarray(z).ravel()
    with np.errstate(
            invalid='ignore'
    ):  # suppress error message when there are already nans in the z array
        z = np.where(z <= -9999, np.nan, z) * getattr(self, forcing + 'Factor')

    #-remove nans from arrays
    x = getattr(self, forcing + 'x')[~np.isnan(z)]
    y = getattr(self, forcing + 'y')[~np.isnan(z)]
    z = z[~np.isnan(z)]

    #-interpolate with method (linear or cubic)
    zi = griddata(
        (x, y),
        z, (getattr(self, forcing + 'xi'), getattr(self, forcing + 'yi')),
        method=getattr(self, forcing + 'Method'))
    zi = np.where(np.isnan(zi), -9999, zi)

    #-convert to PCRaster Python map
    output = pcr.numpy2pcr(pcr.Scalar, zi, -9999)

    return output
Code example #43
File: image.py Project: sebhahn/pynetCF
    def __getitem__(self, key):

        if type(key) == datetime.datetime:
            index = netCDF4.date2index(
                key, self.dataset.variables[self.time_var])
            data = {}
            for var in self._get_all_ts_variables():
                data[var] = self.dataset.variables[var][:, index]
            return data
        else:
            gpi = np.atleast_1d(key)
            for i, gp in enumerate(gpi):
                data = super(ArrayStack, self).read_all_ts(gp)

            return pd.DataFrame(data, index=self.times)
Code example #44
def nc_day_mean_temp(is_day, hh_temp, mean_t_day, times, time_lst):
    """ Finds total daily global radiation from half hourly global radiation data
    :param is_day: half hourly 'is day' netcdf variable with values of 1 day or 0 night
    :param hh_temp: half hourly temperatures netcdf variable
    :param mean_t_day: netcdf variable to fill with mean daytime temperatures
    :param times: half hourly times as netcdf variable
    :param time_lst: list of daily datetime objects
    :return:
    """
    for t in enumerate(time_lst):
        idx = nC.date2index(t[1], times)
        where_day = np.where(is_day[idx:idx + 48, 0, 0] == 1)[0]
        mean_t_day[t[0], 0, 0] = np.mean(hh_temp[idx + where_day[0]:idx +
                                                 where_day[-1]])
    return 'yay'
Code example #45
def nc_total_daily_rg(half_hourly_rg, daily_rg, times, time_lst):
    """ Finds total daily global radiation from half hourly global radiation data
    :param half_hourly_rg: half hourly global radiation as netcdf variable (W m-2)
    :param daily_rg: empty total daily global radiation netcdf variable (M J m-2 day-1)
    :param times: half hourly times as netcdf variable
    :param time_lst: list of daily datetime objects
    :return:
    """
    for t in time_lst:
        idx = nC.date2index(t, times)
        total_daily_rg = 30 * 60 * 1e-6 * np.sum(
            half_hourly_rg[idx:idx + 48, 0,
                           0])  # Convert W m-2 to M J m-2 day-1
        daily_rg[idx, 0, 0] = total_daily_rg
    return 'yay'
Code example #46
File: image.py Project: wpreimes/pynetCF
    def __getitem__(self, key):

        if type(key) == datetime.datetime:
            index = netCDF4.date2index(key,
                                       self.dataset.variables[self.time_var])
            data = {}
            for var in self._get_all_ts_variables():
                data[var] = self.dataset.variables[var][:, index]
            return data
        else:
            gpi = np.atleast_1d(key)
            for i, gp in enumerate(gpi):
                data = super(ArrayStack, self).read_all_ts(gp)

            return pd.DataFrame(data, index=self.times)
Code example #47
    def runTest(self):
        # The test files have no calendar attribute on the time variable.
        calendar = 'standard'

        # Get the real dates
        # skip this until cftime pull request #55 is in a released
        # version (1.0.1?). Otherwise, fix for issue #808 breaks this
        if parse_version(cftime.__version__) >= parse_version('1.0.1'):
            dates = []
            for file in self.files:
                f = Dataset(file)
                t = f.variables['time']
                dates.extend(num2date(t[:], t.units, calendar))
                f.close()

        # Compare with the MF dates
        f = MFDataset(self.files,check=True)
        t = f.variables['time']

        T = MFTime(t, calendar=calendar)
        assert_equal(T.calendar, calendar)
        assert_equal(len(T), len(t))
        assert_equal(T.shape, t.shape)
        assert_equal(T.dimensions, t.dimensions)
        assert_equal(T.typecode(), t.typecode())
        # skip this until cftime pull request #55 is in a released
        # version (1.0.1?). Otherwise, fix for issue #808 breaks this
        if parse_version(cftime.__version__) >= parse_version('1.0.1'):
            assert_array_equal(num2date(T[:], T.units, T.calendar), dates)
        assert_equal(date2index(datetime.datetime(1980, 1, 2), T), 366)
        f.close()

        # Test exception is raised when no calendar attribute is available on the
        # time variable.
        with MFDataset(self.files, check=True) as ds:
            with self.assertRaises(ValueError):
                MFTime(ds.variables['time'])

        # Test exception is raised when the calendar attribute is different on the
        # variables. First, add calendar attributes to file. Note this will modify
        # the files inplace.
        calendars = ['standard', 'gregorian']
        for idx, f in enumerate(self.files):
            with Dataset(f, 'a') as ds:
                ds.variables['time'].calendar = calendars[idx]
        with MFDataset(self.files, check=True) as ds:
            with self.assertRaises(ValueError):
                MFTime(ds.variables['time'])
Code example #48
    def runTest(self):
        # The test files have no calendar attribute on the time variable.
        calendar = 'standard'

        # Get the real dates
        # skip this until cftime pull request #55 is in a released
        # version (1.0.1?). Otherwise, fix for issue #808 breaks this
        if parse_version(cftime.__version__) >= parse_version('1.0.1'):
            dates = []
            for file in self.files:
                f = Dataset(file)
                t = f.variables['time']
                dates.extend(num2date(t[:], t.units, calendar))
                f.close()

        # Compare with the MF dates
        f = MFDataset(self.files, check=True)
        t = f.variables['time']

        T = MFTime(t, calendar=calendar)
        assert_equal(T.calendar, calendar)
        assert_equal(len(T), len(t))
        assert_equal(T.shape, t.shape)
        assert_equal(T.dimensions, t.dimensions)
        assert_equal(T.typecode(), t.typecode())
        # skip this until cftime pull request #55 is in a released
        # version (1.0.1?). Otherwise, fix for issue #808 breaks this
        if parse_version(cftime.__version__) >= parse_version('1.0.1'):
            assert_array_equal(num2date(T[:], T.units, T.calendar), dates)
        assert_equal(date2index(datetime.datetime(1980, 1, 2), T), 366)
        f.close()

        # Test exception is raised when no calendar attribute is available on the
        # time variable.
        with MFDataset(self.files, check=True) as ds:
            with self.assertRaises(ValueError):
                MFTime(ds.variables['time'])

        # Test exception is raised when the calendar attribute is different on the
        # variables. First, add calendar attributes to file. Note this will modify
        # the files inplace.
        calendars = ['standard', 'gregorian']
        for idx, f in enumerate(self.files):
            with Dataset(f, 'a') as ds:
                ds.variables['time'].calendar = calendars[idx]
        with MFDataset(self.files, check=True) as ds:
            with self.assertRaises(ValueError):
                MFTime(ds.variables['time'])
Code example #49
def get_location_cdd(lat: float, long: float, year: int, model: str) -> float:
    d = get_dataset_for_climate_model(model)
    # print(d)

    time = d['/time']
    time_idx = date2index(datetime(year, 1, 1, 0, 0), time)

    lats = d['/lat']
    lat_idx = bisect_left(lats[:], lat)

    longs = d['/lon']
    long_idx = bisect_left(longs[:], long)

    cdd = d['/cdd']

    return cdd[time_idx][lat_idx][long_idx]
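Note that bisect_left assumes the lat and lon coordinate arrays are sorted in ascending order. A hypothetical call, with model name and coordinates made up:

cdd = get_location_cdd(lat=42.4, long=-71.1, year=2030, model='CanESM5')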
Code example #50
def get_doppio(lat,lon,time,depth):
    """
    notice:
        the format of time is like "%Y-%m-%d %H:%M:%S"
        the default depth is under the bottom depth
    the module only output the temperature of point location
    """
    time=dt.strptime(time,'%Y-%m-%d %H:%M:%S') # transform time format
    if (time -datetime.datetime(2017,11,1,0,0,0)).total_seconds()<0:
        print('the date can\'t be earlier than 2017-11-1')
        return np.nan
    
    url_time=time.strftime('%Y-%m-%d')#
    url=get_doppio_url(url_time)
    nc=netCDF4.Dataset(url).variables
    #first find the index of the grid 
    lons=nc['lon_rho'][:]
    lats=nc['lat_rho'][:]
    temp=nc['temp']
    #second find the index of time
    doppio_time=nc['time']
    itime = netCDF4.date2index(time,doppio_time,select='nearest')# where startime in datetime
    # figure out layer from depth
    
    min_distance=dist(lat1=lat,lon1=lon,lat2=lats[0][0],lon2=lons[0][0])   
    index_1,index_2=0,0
    for i in range(len(lons)):
        for j in range(len(lons[i])):
            if min_distance>dist(lat1=lat,lon1=lon,lat2=lats[i][j],lon2=lons[i][j]):
                min_distance=dist(lat1=lat,lon1=lon,lat2=lats[i][j],lon2=lons[i][j])
                index_1=i
                index_2=j
    
    doppio_depth=nc['h'][index_1][index_2]
    
    if depth > doppio_depth:  # case of bottom
        S_coordinate=1
    else:
        S_coordinate=float(depth)/float(doppio_depth)
    if 0<=S_coordinate<1:
        doppio_temp=temp[itime,39-int(S_coordinate/0.025),index_1,index_2]# because there are 0.025 between each later
    elif S_coordinate==1:
        doppio_temp=temp[itime][0][index_1][index_2]
    else:
        doppio_temp=temp[itime][0][index_1][index_2]
    return doppio_temp
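
A hedged usage sketch, assuming the helpers referenced above (get_doppio_url, dist) are defined alongside the function; the point and date are hypothetical:

# Temperature at 10 m depth off the New England shelf.
temp = get_doppio(lat=41.5, lon=-70.5, time='2018-06-01 12:00:00', depth=10)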
Code example #51
File: training.py Project: mogismog/retorcast
def get_training_data(analog_dates, leadtime, train_fname, predictor_name, reforecast_dir):
    """
    Collect training data for statistical post-processing.

    analog_dates - list of datetime objects of potential analogous dates
    leadtime - forecast lead time
    train_fname - variable name used in the reforecast file name
    predictor_name - name of the predictor variable in the NetCDF file
    reforecast_dir - path to the reforecast data
    """
    # Extract the reforecast fields at the analog dates.
    nc_fname = '{}refcst2_{}_day{}.nc'.format(reforecast_dir, train_fname, leadtime)
    with Dataset(nc_fname, 'r') as nc:
        train_idxs = date2index(analog_dates, nc.variables['time'])
        trainData = np.asfortranarray(nc.variables[predictor_name][train_idxs, :, :])
    return trainData
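
A usage sketch under stated assumptions: the reforecast archive follows the refcst2_{var}_day{lead}.nc naming built into the function, and the dates, names, and path below are hypothetical:

import datetime

analog_dates = [datetime.datetime(1995, 1, 2), datetime.datetime(2001, 1, 5)]
train = get_training_data(analog_dates, leadtime=3, train_fname='apcp',
                          predictor_name='Total_precipitation',
                          reforecast_dir='/data/refcst2/')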
Code example #52
def get_jules_state(date_utc, nc_file='jules/output/wallerfing_79_12.3_hourly.nc'):
    """Function that returns a stateVector instance for a given time.
    :param date_utc: datetime object of when to extract JULES output.
    :type date_utc: object
    :param nc_file: JULES output file from which to extract data.
    :type nc_file: str
    :return: Instance of stateVector class.
    :rtype: instance
    """
    nc_dat = nc.Dataset(nc_file, 'r')
    t_idx = nc.date2index(date_utc, nc_dat.variables['time'], select='nearest')
    state_inst = stateVector()
    state_inst.date_utc = nc.num2date(nc_dat.variables['time'][t_idx], nc_dat.variables['time'].units)
    state_inst.lai = nc_dat.variables['croplai'][t_idx, 0, 0, 0]  # (m2 m-2)
    state_inst.can_height = nc_dat.variables['cropcanht'][t_idx, 0, 0, 0]  # (m)
    state_inst.soil_moisture = nc_dat.variables['smcl'][t_idx, 0, 0, 0]  # (kg m-2)
    nc_dat.close()
    return state_inst
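
A minimal usage sketch, assuming the stateVector class is defined elsewhere in the project and the default JULES output file exists:

import datetime

# Model state nearest to noon on a hypothetical date.
state = get_jules_state(datetime.datetime(2012, 6, 15, 12))
print(state.date_utc, state.lai, state.can_height, state.soil_moisture)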
Code example #53
def process_co2_flux_nighttime_d(clipped_co2_flux,
                                 qc_co2_flux,
                                 nee_night,
                                 nee_night_std,
                                 is_day,
                                 wind_dir,
                                 origin,
                                 foot_print,
                                 times,
                                 time_lst,
                                 qc2_tol=1,
                                 qc1_tol=5):
    """ Produces a nighttime NEE product.
    :param clipped_co2_flux: half-hourly clipped co2 flux as netcdf variable
    :param qc_co2_flux: qc flags as nc variable corresponding to the half-hourly co2 flux
    :param nee_night: nc variable to fill with processed data
    :param nee_night_std: nc variable to fill with processed data standard deviation
    :param is_day: nc variable flagging daytime (1) and nighttime (0) records
    :param times: half-hourly times as netcdf variable
    :param time_lst: list of daily datetime objects
    (wind_dir, origin, foot_print and the qc tolerances are passed through
    to quality_control_co2_flux_daily)
    :return:
    """
    for day_idx, date in enumerate(time_lst):
        idx = nC.date2index(date, times)
        # Start of the night: one step past the last daytime record of the day.
        night_idx1 = idx + np.where(is_day[idx:idx + 48, 0, 0] == 1)[0][-1] + 1
        night_idx2 = night_idx1
        # Walk forward until daylight returns; None if the record ends first.
        while is_day[night_idx2, 0, 0] == 0:
            night_idx2 += 1
            if night_idx2 >= len(times):
                night_idx2 = None
                break
        if night_idx2 is None:
            nee_night[day_idx, 0, 0] = float('NaN')
            break
        quality_control_co2_flux_daily(clipped_co2_flux, qc_co2_flux,
                                       nee_night, nee_night_std, wind_dir,
                                       origin, foot_print, night_idx1,
                                       night_idx2 - 1, day_idx, is_day,
                                       qc2_tol, qc1_tol)
        # Nighttime NEE should be a positive (respiration) flux.
        if nee_night[day_idx, 0, 0] < 0.0:
            nee_night[day_idx, 0, 0] = float('NaN')
    return 'yay'
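
The heart of the routine is locating the night window that follows each daily timestamp. A standalone sketch of that search on a plain 1-D array, assuming 48 half-hourly records per day:

import numpy as np

def night_window(idx, is_day):
    # One step past the last daytime record of the day starting at idx,
    # then walk forward until daylight returns; None if the record ends first.
    start = idx + np.where(is_day[idx:idx + 48] == 1)[0][-1] + 1
    end = start
    while end < len(is_day) and is_day[end] == 0:
        end += 1
    return (start, end - 1) if end < len(is_day) else None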
Code example #54
def get_FVCOM_bottom_temp(lati, loni, dtime,
                          layer):  # gets modeled temp using GOM3 forecast
    '''
    Taken primarily from Rich's blog at
    http://rsignell-usgs.github.io/blog/blog/2014/01/08/fvcom/ (July 30, 2018),
    where lati and loni are the position of interest, dtime is the datetime,
    and layer is "-1" for bottom.
    '''
    #urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/fvcom/hindcasts/30yr_gom3'
    #urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc'
    urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_GOM3_FORECAST.nc'
    nc = netCDF4.Dataset(urlfvcom).variables
    #first find the index of the grid
    lat = nc['lat'][:]
    lon = nc['lon'][:]
    inode = nearlonlat(lon, lat, loni, lati)
    #second find the index of time
    time_var = nc['time']
    itime = netCDF4.date2index(dtime, time_var,
                               select='nearest')  # index of the time step nearest to dtime
    return nc['temp'][itime, layer, inode]
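
A hedged usage sketch, assuming nearlonlat is defined alongside the function; the point is hypothetical:

import datetime

# Bottom temperature (layer -1) near a point in the Gulf of Maine.
temp = get_FVCOM_bottom_temp(lati=42.0, loni=-69.5,
                             dtime=datetime.datetime.utcnow(), layer=-1)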
Code example #55
def getERA5_1DAYfilename(confM2R, year, month, day, myvar):
    # Cache the concatenated ERA5 time variable on the config object.
    if len(confM2R.timeobject) == 0:
        mcdf = MFDataset(confM2R.atmosphericpath + "*.nc")
        confM2R.timeobject = mcdf.variables["time"]
        print("Loaded all ERA5 timesteps: {}".format(confM2R.timeobject[:]))

    index = date2index(datetime(year, month, day, 0, 0),
                       confM2R.timeobject,
                       calendar=confM2R.timeobject.calendar,
                       select="nearest")
    seldate = num2date(confM2R.timeobject[index],
                       units=confM2R.timeobject.units,
                       calendar=confM2R.timeobject.calendar)
    print("Selected date {} (index {})".format(seldate, index))

    # Note: the filename template below is retained from the original source
    # even though it follows the SODA naming convention.
    return '{}soda3.3.2_5dy_ocean_reg_{:04}_{:02}_{:02}.nc'.format(
        confM2R.atmosphericpath, seldate.year, seldate.month, seldate.day)
Code example #56
def get_espresso_temp(time, lat, lon, depth):
    # ESPreSSO model data spans 2009-10-12 to 2017-1-1.
    time = pd.to_datetime(time)
    url = get_url(time)
    nc = netCDF4.Dataset(url).variables
    # First find the index of the grid point nearest to (lat, lon).
    lons = nc['lon_rho'][:]
    lats = nc['lat_rho'][:]
    temp = nc['temp']
    # Second find the index of time; files before 2013-5-18 store time
    # as 'ocean_time'.
    if time <= datetime(2013, 5, 18):
        espresso_time = nc['ocean_time']
    else:
        espresso_time = nc['time']
    itime = netCDF4.date2index(time, espresso_time, select='nearest')
    index = nearest_point_index2(lon, lat, lons, lats)
    depth_layers = nc['h'][index[0][0]][index[1][0]] * nc['s_rho'][:]
    index_depth = np.argmin(abs(depth + depth_layers))  # depth_layers are negative numbers
    espresso_temp = temp[itime, index_depth, index[0][0], index[1][0]]
    return espresso_temp
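
A usage sketch, assuming get_url and nearest_point_index2 are defined alongside this function; the point and date are hypothetical:

# Temperature at 30 m depth at a mid-shelf point.
temp = get_espresso_temp('2015-07-01 00:00:00', lat=39.5, lon=-73.0, depth=30)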
Code example #57
File: tst_multifile2.py Project: 8900/netCDF4-Python
def runTest(self):
    # Get the real dates
    dates = []
    for file in self.files:
        f = Dataset(file)
        t = f.variables['time']
        dates.extend(num2date(t[:], t.units, t.calendar))
        f.close()

    # Compare with the MF dates
    f = MFDataset(self.files, check=True)
    t = f.variables['time']
    mfdates = num2date(t[:], t.units, t.calendar)

    T = MFTime(t)
    assert_equal(len(T), len(t))
    assert_equal(T.shape, t.shape)
    assert_equal(T.dimensions, t.dimensions)
    assert_equal(T.typecode(), t.typecode())
    assert_array_equal(num2date(T[:], T.units, T.calendar), dates)
    assert_equal(date2index(datetime.datetime(1980, 1, 2), T), 366)