Example #1
    def harmonics(self, ind, twodim=True, **kwarg):

        if twodim:
            self.coef = ut_solv(self.time, self.ua[:, ind], self.va[:, ind],
                                self.lat[ind], **kwarg)

            self.QC.append('ut_solv done for velocity')

        else:
            self.coef = ut_solv(self.time, self.ua[:, ind], [],
                                self.lat[ind], **kwarg)

            self.QC.append('ut_solv done for elevation')
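
A minimal usage sketch for the method above (the loader class name, file path and element index are hypothetical; keyword arguments are forwarded unchanged to ut_solv):

# Hypothetical station object exposing time, ua, va, lat and a QC list,
# matching the attributes used in the method body above.
station = Station('dn_coarse_0001.nc')

# two-dimensional (velocity) analysis at element 42
station.harmonics(42, twodim=True, cnstit='auto', notrend=True,
                  rmin=0.95, method='ols', nodiagn=True, linci=True)
vel_coef = station.coef

# one-dimensional analysis at the same element
station.harmonics(42, twodim=False, cnstit='auto', notrend=True,
                  rmin=0.95, method='ols', nodiagn=True, linci=True)
scalar_coef = station.coef
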
Example #2
    def harmonics(self, time_ind=slice(None), **kwarg):
        '''
        Description:
        -----------
        This function performs a harmonic analysis on the sea surface elevation
        time series or the velocity components timeseries.

        Outputs:
        -------
          - harmo = harmonic coefficients, dictionary

        Keywords:
        --------
          - time_ind = time indices to work in, list of integers

        Options:
        -------
        Options are the same as for ut_solv, which are shown below with
        their default values:
            conf_int=True; cnstit='auto'; notrend=0; prefilt=[]; nodsatlint=0;
            nodsatnone=0; gwchlint=0; gwchnone=0; infer=[]; inferaprx=0;
            rmin=1; method='cauchy'; tunrdn=1; linci=0; white=0; nrlzn=200;
            lsfrqosmp=1; nodiagn=0; diagnplots=0; diagnminsnr=2;
            ordercnstit=[]; runtimedisp='yyy'

        Notes:
        -----
        For more detailed information about ut_solv, please see
        https://github.com/wesleybowman/UTide

        '''

        harmo = ut_solv(self._var.matlabTime[time_ind], self._var.el, [],
                        self._var.lat, **kwarg)
        return harmo
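
A hedged usage sketch for this wrapper, assuming it is bound to an object whose _var attribute carries matlabTime, el and lat as the body implies (the class name and file path are placeholders); the keywords shown are the ut_solv options most commonly used in the other examples on this page:

# Hypothetical tide-gauge object; _var.matlabTime, _var.el and _var.lat
# are assumed to be populated before the call.
tg = TideGauge('/path/to/gauge_file.mat')

# analyse the full elevation record
coef = tg.harmonics(cnstit='auto', notrend=True, rmin=0.95,
                    method='ols', nodiagn=True, linci=True, conf_int=True)

# or restrict the analysis to the first part of the record
coef_sub = tg.harmonics(time_ind=slice(0, 5000), cnstit='auto',
                        notrend=True, rmin=0.95, method='ols',
                        nodiagn=True, linci=True, conf_int=True)
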
Example #3
    def harmonics(self, time_ind=slice(None), **kwarg):
        '''
        Description:
        -----------
        This function performs a harmonic analysis on the sea surface elevation
        time series or the velocity components timeseries.

        Outputs:
        -------
          - harmo = harmonic coefficients, dictionary

        Keywords:
        --------
          - time_ind = time indices to work in, list of integers

        Options:
        -------
        Options are the same as for ut_solv, which are shown below with
        their default values:
            conf_int=True; cnstit='auto'; notrend=0; prefilt=[]; nodsatlint=0;
            nodsatnone=0; gwchlint=0; gwchnone=0; infer=[]; inferaprx=0;
            rmin=1; method='cauchy'; tunrdn=1; linci=0; white=0; nrlzn=200;
            lsfrqosmp=1; nodiagn=0; diagnplots=0; diagnminsnr=2;
            ordercnstit=[]; runtimedisp='yyy'

        Notes:
        -----
        For more detailed information about ut_solv, please see
        https://github.com/wesleybowman/UTide

        '''

        harmo = ut_solv(self._var.matlabTime[time_ind],
                       self._var.el, [],
                       self._var.lat, **kwarg)
        return harmo
Example #4
def tideGauge(datafiles, Struct):
    dgFilename = '/array/home/rkarsten/common_tidal_files/data/observed/DG/TideGauge/DigbyWharf_015893_20140115_2221_Z.mat'

    gpFilename = '/array/home/rkarsten/common_tidal_files/data/observed/GP/TideGauge/Westport_015892_20140325_1212_Z.mat'

    dgtg = sio.loadmat(dgFilename, struct_as_record=False, squeeze_me=True)
    gptg = sio.loadmat(gpFilename, struct_as_record=False, squeeze_me=True)

    ut_constits = ['M2','S2','N2','K2','K1','O1','P1','Q1']

    print 'Westport TideGauge'
    coef_gptg = ut_solv(gptg['RBR'].date_num_Z,
                        (gptg['RBR'].data-np.mean(gptg['RBR'].data)), [],
                        gptg['RBR'].lat, cnstit=ut_constits, notrend=True,
                        rmin=0.95, method='ols', nodiagn=True, linci=True,
                        ordercnstit='frq')

    print 'DigbyWharf TideGauge'
    coef_dgtg = ut_solv(dgtg['RBR'].date_num_Z,
                        (dgtg['RBR'].data-np.mean(dgtg['RBR'].data)), [],
                        dgtg['RBR'].lat, cnstit=ut_constits, notrend=True,
                        rmin=0.95, method='ols', nodiagn=True, linci=True,
                        ordercnstit='frq')

    struct = np.array([])
    for filename in datafiles:

        print filename
        data = nc.Dataset(filename, 'r')
        lat = data.variables['lat'][:]
        lon = data.variables['lon'][:]
        time_JD = data.variables['time_JD'][:]
        time_second = data.variables['time_second'][:]
        time = time_JD + 678942 + time_second / (24*3600)

        #time = mjd2num(time)

        tg_gp_id = np.argmin(np.sqrt((lon-gptg['RBR'].lon)**2+(lat-gptg['RBR'].lat)**2))
        tg_dg_id = np.argmin(np.sqrt((lon-dgtg['RBR'].lon)**2+(lat-dgtg['RBR'].lat)**2))

        #elgp = data.variables['zeta'][tg_gp_id, :]
        #eldg = data.variables['zeta'][tg_dg_id, :]
        elgp = data.variables['zeta'][:, tg_gp_id]
        eldg = data.variables['zeta'][:, tg_dg_id]

        coef_dg = ut_solv(time, eldg, [], dgtg['RBR'].lat, cnstit=ut_constits,
                          notrend=True, rmin=0.95, method='ols', nodiagn=True,
                          linci=True, ordercnstit='frq')

        coef_gp = ut_solv(time, elgp, [], gptg['RBR'].lat, cnstit=ut_constits,
                          notrend=True, rmin=0.95, method='ols', nodiagn=True,
                          linci=True, ordercnstit='frq')


        Name = filename.split('/')[-3]
        Name = '2012_station_run'

        print Name

        obs_loc = {'name':Name, 'type':'TideGauge',
                   'mod_time':time, 'dg_time':dgtg['RBR'].date_num_Z,
                   'gp_time':gptg['RBR'].date_num_Z,
                   'lon':lon, 'lat':lat,
                   'dg_tidegauge_harmonics': coef_dgtg,
                   'gp_tidegauge_harmonics':coef_gptg,
                   'dg_mod_harmonics': coef_dg,
                   'gp_mod_harmonics': coef_gp,
                   'dg_tg_data':dgtg['RBR'].data,
                   'gp_tg_data':gptg['RBR'].data,
                   'eldg':eldg, 'elgp':elgp}

        struct = np.hstack((struct, obs_loc))

        Struct[Name] = np.hstack((Struct[Name], struct))


    #pickle.dump(struct, open("structADCP.p", "wb"))
    return Struct
Example #5
def adcp(datafiles, debug=False):

    if debug:
        adcpFilename = '/home/wesley/github/karsten/adcp/testADCP.txt'
    else:
        adcpFilename = '/array/home/107002b/github/karsten/adcp/acadia_dngrid_adcp_2012.txt'

    #adcpFilename = '/home/wesleyb/github/karsten/adcp/dngrid_adcp_2012.txt'
    adcp = pd.read_csv(adcpFilename)

    for i,v in enumerate(adcp['Latitude']):
        path = adcp.iloc[i, -1]
        if path != 'None':
            print adcp.iloc[i, 0]
            #print lonlat[i,1], uvnodell[ii,1]

            ADCP = pd.read_csv(path, index_col=0)
            ADCP.index = pd.to_datetime(ADCP.index)

            adcpTime = np.empty(ADCP.index.shape)

            for j, jj in enumerate(ADCP.index):
                adcpTime[j] = datetime2matlabdn(jj)

            adcpCoef = ut_solv(adcpTime, ADCP['u'].values,
                               ADCP['v'].values, v,
                               cnstit='auto', rmin=0.95, notrend=True,
                               method='ols', nodiagn=True, linci=True,
                               conf_int=True)

            adcpData = adcpCoef

    obs = pd.DataFrame({'u':ADCP['u'].values, 'v':ADCP['v'].values})
    Struct = {}


    for filename in datafiles:
        print filename
        data = nc.Dataset(filename, 'r')
        #lat = data.variables['lat'][:]
        #lon = data.variables['lon'][:]
        time_JD = data.variables['time_JD'][:]
        time_second = data.variables['time_second'][:]
        time = time_JD + 678942 + time_second/(24*3600)

        lonc = data.variables['lon'][:]
        latc = data.variables['lat'][:]
        ua = data.variables['ua']
        va = data.variables['va']
        #trinodes = data.variables['nv'][:]

        #time = mjd2num(time)

        lonlat = np.array([adcp['Longitude'], adcp['Latitude']]).T

        #index = closest_point(lonlat, lon, lat)
        index = closest_point(lonlat, lonc, latc)

        adcpData = pd.DataFrame()
        runData = pd.DataFrame()

        Name = filename.split('/')[-3]
        Name = '2012_station_run'

        print Name
        struct = np.array([])

        for i, ii in enumerate(index):

            path = adcp.iloc[i, -1]
            if path != 'None':
                print adcp.iloc[i, 0]

                coef = ut_solv(time, ua[:, ii], va[:, ii], lonlat[i, 1],
                               cnstit='auto', rmin=0.95, notrend=True,
                               method='ols', nodiagn=True, linci=True,
                               conf_int=True)

                runData = coef

                mod = pd.DataFrame({'ua':ua[:, ii], 'va':va[:, ii]})

                obs_loc = {'name':adcp.iloc[i,0], 'type':'ADCP', 'lat':lonlat[i,-1],
                        'lon':lonlat[0,0], 'obs_timeseries':obs,
                        'mod_timeseries':mod, 'obs_time':adcpTime,
                        'mod_time':time,'speed_obs_harmonics':adcpData,
                        'speed_mod_harmonics':runData}


                struct = np.hstack((struct, obs_loc))

        Struct[Name] = struct

    return Struct
Example #6
# runData = pd.DataFrame()

for i, ii in enumerate(index):

    path = adcp.iloc[i, -1]
    if path != 'None':
        ADCP = pd.read_csv(path, index_col=0)
        ADCP.index = pd.to_datetime(ADCP.index)

        adcpTime = np.empty(ADCP.index.shape)

        for j, jj in enumerate(ADCP.index):
            adcpTime[j] = datetime2matlabdn(jj)

        adcpCoef = ut_solv(time, ua[:, ii], va[:, ii], uvnodell[ii, 1],
                           'auto', Rayleigh[0], 'NoTrend', 'Rmin', 'OLS',
                           'NoDiagn', 'LinCI')

        adcpAUX = adcpCoef['aux']
        del adcpAUX['opt']
        del adcpCoef['aux']

        adcpAUX = pd.DataFrame(adcpAUX)
        a = pd.DataFrame(adcpCoef)
        a = pd.concat([a, adcpAUX], axis=1)
        # a['aux'] = pd.Series(a['aux'])

        nameSpacer = pd.DataFrame({'ADCP_Location': [adcp.iloc[i, 0]]})
        adcpData = pd.concat([adcpData, nameSpacer])
        adcpData = pd.concat([adcpData, a])
Example #7
    def harmonics(self, **kwarg):

        self.coef = ut_solv(self.time,
                            self.elev, [],
                            self.lat, **kwarg)
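
Example #8 below drives this wrapper through a Tidegauge object; a condensed, hedged sketch of that call pattern (the file path is a placeholder):

# Tidegauge is assumed to populate self.time, self.elev and self.lat on load.
tideData = Tidegauge('/path/to/tide_gauge_file.mat')
ut_constits = ['M2','S2','N2','K2','K1','O1','P1','Q1']
tideData.harmonics(cnstit=ut_constits, notrend=True, rmin=0.95,
                   method='ols', nodiagn=True, linci=True,
                   ordercnstit='frq')
elev_coef = tideData.coef
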
Example #8
def main(fvFiles, adcpFiles, tideFiles, isStation=True, debug=False):

    #fvdebugData = FVCOM(fvdebug)
    #saveName = 'validationStruct.p'
    #Name = 'june_2013_3D_station'
    #Struct = {}

    for fvFile in fvFiles:
        print fvFile
        struct = np.array([])
        for adcpFile in adcpFiles:
            print adcpFile
            adcpData = ADCP(adcpFile)
            lonlat = np.array([adcpData.lon[0], adcpData.lat[0]]).T

            print adcpData.mtime.shape
            print adcpData.ua.shape
            print adcpData.va.shape
            print adcpData.surf.shape

            adcpVelCoef = ut_solv(adcpData.mtime, adcpData.ua,
                            adcpData.va, adcpData.lat[0],
                            cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

            adcpElevCoef = ut_solv(adcpData.mtime, adcpData.surf,
                            [], adcpData.lat[0],
                            cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

            #adcpName = adcpFile.split('/')[-1].split('.')[0]

            adcp_obs = {'ua':adcpData.ua,
                        'va':adcpData.va,
                        'elev':adcpData.surf,
                        'u':adcpData.east_vel,
                        'v':adcpData.north_vel,
                        'bins':adcpData.bins}

    #        adcp_obs = pd.DataFrame({'ua':adcpData.ua,
    #                                 'va':adcpData.va,
    #                                 'elev':adcpData.surf,
    #                                 'u':adcpData.east_vel,
    #                                 'v':adcpData.north_vel})


            print fvFile
            saveName = fvFile + 'validationStruct.p'
            if isStation:
                fvData = station(fvFile)
                ind = closest_point(lonlat, fvData.lon, fvData.lat)
            else:
                #ax = np.array([adcpData.lon[0], adcpData.lat[0]]).T
                ax = [[adcpData.lon[0][0]], [adcpData.lat[0][0]]]
                #ax = [adcpData.lon[0][0], adcpData.lat[0][0]]
                fvData = FVCOM(fvFile, ax)
                #print ax
    #                lonlat = np.array([[adcpData.lon[0][0],
    #                                   adcpData.lat[0][0]]])
    #                ind = closest_point(lonlat, fvData.lon, fvData.lat)
    #                print ind

    #                ind = fvData.closest_point([adcpData.lon[0][0]],
    #                                           [adcpData.lat[0][0]])


            # right one
            #ind = closest_point(lonlat, fvData.lon, fvData.lat)

            #lonlat = np.array([adcpData.x[0], adcpData.y[0]]).T
            #newind = closest_point(lonlat, fvdebugData.lonc, fvdebugData.latc)
            #ind = closest_point(lonlat, fvData.x, fvData.y)
            #new = np.array([fvdebugData.xc[newind], fvdebugData.yc[newind]])
            #ind = closest_point(new.T, fvData.x, fvData.y)


            if isStation:
                fvVelCoef = ut_solv(fvData.time, fvData.ua[:, ind].flatten(),
                                    fvData.va[:, ind].flatten(),
                            adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

                print fvData.elev[:, ind].shape
                fvElevCoef = ut_solv(fvData.time, fvData.elev[:, ind].flatten(), [],
                            adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

                mod = {'ua':fvData.ua[:, ind].flatten(),
                        'va':fvData.va[:, ind].flatten(),
                        'elev':fvData.elev[:, ind].flatten(),
                        'u':fvData.u,
                        'v':fvData.v}
            else:
                fvVelCoef = ut_solv(fvData.time, fvData.ua.flatten(),
                                    fvData.va.flatten(),
                            adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

                #print fvData.elev[:, ind].shape
                fvElevCoef = ut_solv(fvData.time, fvData.elev.flatten(), [],
                            adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

                if fvData.D3:
                    mod = {'ua':fvData.ua.flatten(),
                            'va':fvData.va.flatten(),
                            'elev':fvData.elev.flatten(),
                            'u':fvData.u,
                            'v':fvData.v}
                else:
                    mod = {'ua':fvData.ua.flatten(),
                            'va':fvData.va.flatten(),
                            'elev':fvData.elev.flatten()}



            obs_loc = {'name': adcpFile,
                        'type':'ADCP',
                        'lat':adcpData.lat[0],
                        'lon':adcpData.lon[0],
                        'obs_timeseries':adcp_obs,
                        'mod_timeseries':mod,
                        'obs_time':adcpData.mtime,
                        'mod_time':fvData.time,
                        'vel_obs_harmonics':adcpVelCoef,
                        'elev_obs_harmonics':adcpElevCoef,
                        'vel_mod_harmonics':fvVelCoef,
                        'elev_mod_harmonics':fvElevCoef}
                        #'adcp_bins':adcpData.bins}

    #            obs_loc = {'name': adcpName, 'type':'ADCP', 'lat':fvdebugData.lat[newind],
    #                    'lon':fvdebugData.lon[newind], 'obs_timeseries':adcp_obs,
    #                    'mod_timeseries':mod, 'obs_time':adcpData.mtime,
    #                    'mod_time':fvData.time, 'vel_obs_harmonics':adcpVelCoef,
    #                    'elev_obs_harmonics':adcpElevCoef,
    #                    'vel_mod_harmonics':fvVelCoef, 'elev_mod_harmonics':fvElevCoef}

            struct = np.hstack((struct, obs_loc))


    #for fvFile in fvFiles:
        for tideFile in tideFiles:

            print tideFile

            tideData = Tidegauge(tideFile)
            ut_constits = ['M2','S2','N2','K2','K1','O1','P1','Q1']
            tideData.harmonics(cnstit=ut_constits, notrend=True,
                            rmin=0.95, method='ols', nodiagn=True, linci=True,
                            ordercnstit='frq')

            tide_obs = {'data':tideData.data, 'elev':tideData.elev}


            print fvFile

            if isStation:
                fvData = station(fvFile)
                ind = np.argmin(np.sqrt((fvData.lon-tideData.lon)**2+(fvData.lat-tideData.lat)**2))
                #ind = closest_point(lonlat, fvData.lon, fvData.lat)
            else:
                #ax = np.array([adcpData.lon[0], adcpData.lat[0]]).T
                ax = [[tideData.lon], [tideData.lat]]
                fvData = FVCOM(fvFile, ax)

            if isStation:

                print fvData.elev[:, ind].shape
                fvElevCoef = ut_solv(fvData.time, fvData.elev[:, ind].flatten(), [],
                            adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

                mod = {'ua':fvData.ua[:, ind].flatten(),
                        'va':fvData.va[:, ind].flatten(),
                        'elev':fvData.elev[:, ind].flatten(),
                        'u':fvData.u,
                        'v':fvData.v}
            else:

                #print fvData.elev[:, ind].shape
                fvElevCoef = ut_solv(fvData.time, fvData.elev.flatten(), [],
                            adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

                if fvData.D3:
                    mod = {'ua':fvData.ua.flatten(),
                            'va':fvData.va.flatten(),
                            'elev':fvData.elev.flatten(),
                            'u':fvData.u,
                            'v':fvData.v}
                else:
                    mod = {'ua':fvData.ua.flatten(),
                            'va':fvData.va.flatten(),
                            'elev':fvData.elev.flatten()}



            obs_loc = {'name':tideFile, 'type':'TideGauge',
                        'mod_time':fvData.time,
                        'obs_time':tideData.time,
                        'lon':tideData.lon,
                        'lat':tideData.lat,
                        'elev_obs_harmonics':tideData.coef,
                        'elev_mod_harmonics': fvElevCoef,
                        'obs_timeseries':tide_obs,
                        'mod_timeseries':mod}


            saveName = os.path.dirname(fvFile) + '/validationStruct.p'
            print 'SAVENAME'
            print saveName
            struct = np.hstack((struct, obs_loc))

        pickle.dump(struct, open(saveName, "wb"))
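
A hedged sketch of driving this routine and reading back the pickled structure it writes (paths are placeholders; FVCOM, station, ADCP and Tidegauge are assumed importable as in the snippet):

import pickle

fvFiles   = ['/path/to/run1/output/', '/path/to/run2/output/']
adcpFiles = ['/path/to/Flow_GP-130620-BPa_avg5.mat']
tideFiles = ['/path/to/DigbyWharf_015893_20140115_2221_Z.mat']

main(fvFiles, adcpFiles, tideFiles, isStation=True)

# each run directory gets its own validationStruct.p
struct = pickle.load(open('/path/to/run1/output/validationStruct.p', 'rb'))
site = struct[0]
elev_mod_coef = site['elev_mod_harmonics']   # ut_solv coefficient dictionary
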
Example #9
    path = adcp.iloc[i, -1]
    if path != 'None':
        print adcp.iloc[i, 0]
        #print lonlat[i,1], uvnodell[ii,1]

        ADCP = pd.read_csv(path, index_col=0)
        ADCP.index = pd.to_datetime(ADCP.index)

        adcpTime = np.empty(ADCP.index.shape)

        for j, jj in enumerate(ADCP.index):
            adcpTime[j] = datetime2matlabdn(jj)

        adcpCoef = ut_solv(adcpTime, ADCP['u'].values, ADCP['v'].values, lonlat[i, 1],
                            cnstit='auto', rmin=Rayleigh[0], notrend=True,
                            method='ols', nodiagn=True, linci=True,
                            conf_int=False)

#        adcpAUX = adcpCoef['aux']
#        del adcpAUX['opt']
#        del adcpCoef['aux']

#        adcpAUX = pd.DataFrame(adcpAUX)
#        a = pd.DataFrame(adcpCoef)
#        size = a.shape[0]
#        nameSpacer = pd.DataFrame({'ADCP_Location': np.repeat(adcp.iloc[i, 0],
#                                                                size)})
#
#        cat = pd.concat([a, adcpAUX, nameSpacer], axis=1)
#
#        adcpData = cat.set_index('ADCP_Location')
Example #10
    def Harmonic_analysis_at_point(self,
                                   pt_lon,
                                   pt_lat,
                                   time_ind=[],
                                   t_start=[],
                                   t_end=[],
                                   elevation=True,
                                   velocity=False,
                                   debug=False,
                                   **kwarg):
        '''
        Description:
        -----------
        This function performs a harmonic analysis on the sea surface elevation
        time series or the velocity components timeseries.

        Inputs:
        ------
          - pt_lon = longitude in decimal degrees East, float number
          - pt_lat = latitude in decimal degrees North, float number

        Outputs:
        -------
          - harmo = harmonic coefficients, dictionary

        Keywords:
        --------
          - time_ind = time indices to work in, list of integers
          - t_start = start time, as a string ('yyyy-mm-ddThh:mm:ss'),
                     or time index as an integer
          - t_end = end time, as a string ('yyyy-mm-ddThh:mm:ss'),
                    or time index as an integer
          - elevation=True means that ut_solv will be done for elevation.
          - velocity=True means that ut_solv will be done for velocity.

        Options:
        -------
        Options are the same as for ut_solv, which are shown below with
        their default values:
            conf_int=True; cnstit='auto'; notrend=0; prefilt=[]; nodsatlint=0;
            nodsatnone=0; gwchlint=0; gwchnone=0; infer=[]; inferaprx=0;
            rmin=1; method='cauchy'; tunrdn=1; linci=0; white=0; nrlzn=200;
            lsfrqosmp=1; nodiagn=0; diagnplots=0; diagnminsnr=2;
            ordercnstit=[]; runtimedisp='yyy'

        Notes:
        -----
        For more detailed information about ut_solv, please see
        https://github.com/wesleybowman/UTide

        '''
        debug = (debug or self._debug)
        #TR_comments: Add debug flag in Utide: debug=self._debug
        index = closest_point([pt_lon], [pt_lat],
                              self._grid.lonc,
                              self._grid.latc,
                              debug=debug)[0]
        argtime = []
        if not time_ind == []:
            argtime = time_ind
        elif not t_start == []:
            if type(t_start) == str:
                argtime = time_to_index(t_start,
                                        t_end,
                                        self._var.matlabTime,
                                        debug=debug)
            else:
                argtime = arange(t_start, t_end)

        if velocity:
            time = self._var.matlabTime[:]
            u = self.interpolation_at_point(self._var.ua,
                                            pt_lon,
                                            pt_lat,
                                            index=index,
                                            debug=debug)
            v = self.interpolation_at_point(self._var.va,
                                            pt_lon,
                                            pt_lat,
                                            index=index,
                                            debug=debug)
            if not argtime == []:
                time = time[argtime[:]]
                u = u[argtime[:]]
                v = v[argtime[:]]

            lat = self._grid.lat[index]
            harmo = ut_solv(time, u, v, lat, **kwarg)

        if elevation:
            time = self._var.matlabTime[:]
            el = self.interpolation_at_point(self._var.el,
                                             pt_lon,
                                             pt_lat,
                                             index=index,
                                             debug=debug)

            if not argtime == []:
                time = time[argtime[:]]
                el = el[argtime[:]]

            lat = self._grid.lat[index]
            harmo = ut_solv(time, el, [], lat, **kwarg)
            #Write meta-data only if computed over all the elements

            return harmo
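
A minimal calling sketch for this method, assuming it lives on an FVCOM-like object with the _grid, _var and interpolation_at_point attributes used above (coordinates, times and the file path are placeholders):

# Hypothetical FVCOM object exposing the method above.
fvcom = FVCOM('/path/to/dngrid_0001.nc')

harmo = fvcom.Harmonic_analysis_at_point(
    -66.34, 44.26,
    t_start='2013-06-01T00:00:00', t_end='2013-06-29T00:00:00',
    elevation=True, velocity=False,
    cnstit='auto', notrend=True, rmin=0.95,
    method='ols', nodiagn=True, linci=True, conf_int=True)

# Note: as written, only the elevation branch returns; a velocity-only call
# (elevation=False, velocity=True) computes the coefficients but returns None.
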
Example #11
def getData():
    '''
    Extracts data and stores it in a pickle file. I'll write a better
    comment later.
    '''

    # filename = '/home/wesley/github/aidan-projects/grid/dngrid_0001.nc'
    # filename = '/home/abalzer/scratch/standard_run_directory/0.0015/output/dngrid_0001.nc'
    # filename = '/home/wesley/ncfiles/smallcape_force_0001.nc'
    # filename = '/home/abalzer/standard_run_directory/0.0015/output/dngrid_0001.nc'
    filename = '/array/data1/rkarsten/dncoarse_bctest/output/dn_coarse_0001.nc'

    data = nc.Dataset(filename, 'r')
    x = data.variables['x'][:]
    y = data.variables['y'][:]
    lon = data.variables['lon'][:]
    lat = data.variables['lat'][:]
    ua = data.variables['ua']
    va = data.variables['va']
    time = data.variables['time'][:]
    trinodes = data.variables['nv'][:]
    #h = data.variables['zeta'][:]
    
    (nodexy, uvnodexy, dt, deltat,
     hour, thour, TP, rho, g, period,
     nodell, uvnodell, trinodes) = ncdatasort(x, y, time*24*3600,
                                              trinodes, lon, lat)
    
    time = mjd2num(time)
    
    Rayleigh = np.array([1])
    
    # adcpFilename = '/home/wesley/github/karsten/adcp/dngrid_adcp_2012.txt'
    # adcpFilename = '/home/wesley/github/karsten/adcp/testADCP.txt'
    # adcpFilename = '/home/wesleyb/github/karsten/adcp/dngrid_adcp_2012.txt'
    adcpFilename = '/array/home/116822s/github/karsten/acadia_dngrid_adcp_2012.txt'
    adcp = pd.read_csv(adcpFilename)
    
    lonlat = np.array([adcp['Longitude'], adcp['Latitude']]).T
    
    index = closest_point(lonlat, lon, lat)
    
    # set up lists for output data, grab shape values
    adcp_out_u, adcp_out_v = [], []
    fvc_out_u, fvc_out_v = [], []

    for i, ii in enumerate(index):
        tmp_path = adcp.iloc[i, -1]
        if tmp_path != 'None':
            tmp_adcp = pd.read_csv(tmp_path, index_col=0)
            adcp_size = tmp_adcp['u'].values.size

            adcp_out_u.append(np.zeros(adcp_size))
            adcp_out_v.append(np.zeros(adcp_size))
        else:
            adcp_out_u.append('Nothing!')
            adcp_out_v.append('Nothing!')

    fvc_out_u, fvc_out_v = np.zeros([index.size, ua[:, 0].size]), \
                           np.zeros([index.size, ua[:, 0].size])
    adcp_start, adcp_end, adcp_step = [], [], []

    # main loop, loads in data
    for i, ii in enumerate(index):

        path = adcp.iloc[i, -1]
        if path != 'None':
            ADCP = pd.read_csv(path, index_col=0)
            ADCP.index = pd.to_datetime(ADCP.index)

            adcp_out_u[i] = ADCP['u'].values
            adcp_out_v[i] = ADCP['v'].values
            adcp_start.append(ADCP.index[0].to_datetime())
            adcp_end.append(ADCP.index[-1].to_datetime())
            adcp_step.append(ADCP.index[1].to_datetime() - adcp_start[i])

            fvc_out_u[i] = ua[:, ii]
            fvc_out_v[i] = va[:, ii]
        else:
            adcp_start.append('Nothing!')
            adcp_end.append('Nothing!')
    
    # remove all those 'Nothing's created from empty data
    adcp_out_u = [i for i in adcp_out_u if i != 'Nothing!']
    adcp_out_v = [i for i in adcp_out_v if i != 'Nothing!']
    adcp_start = [i for i in adcp_start if i != 'Nothing!']
    adcp_end = [i for i in adcp_end if i != 'Nothing!']

    # set up times
    f_start = time[0]
    f_end = time[-1]
    f_start = datetime.fromordinal(int(f_start)) + \
              timedelta(days=(f_start%1)) - timedelta(days=366)
    f_end = datetime.fromordinal(int(f_end)) + timedelta(days=(f_end%1)) - \
            timedelta(days=366)
    f_step = datetime.fromordinal(int(time[1])) + \
             timedelta(days=(time[1]%1)) - timedelta(days=366) - f_start

    # put together dictionaries, ready for interpolation/smoothing
    adcp_dicts = []
    fvc_dicts = []
    for i in np.arange(len(adcp_start)):
        adcp = {}
        adcp['start'] = adcp_start[i]
        adcp['end'] = adcp_end[i]
        adcp['step'] = adcp_step[i]
        adcp['pts'] = np.sqrt(adcp_out_u[i]**2 + adcp_out_v[i]**2)
        adcp_dicts.append(adcp)

        fvc = {}
        fvc['start'] = f_start
        fvc['end'] = f_end
        fvc['step'] = f_step
        fvc['pts'] = np.sqrt(fvc_out_u[i]**2 + fvc_out_v[i]**2)
        fvc_dicts.append(fvc)

    # load data into file using pickle
    filename_1 = '/array/home/116822s/tidal_data/stats_test/ADCP_data1.pkl'
    out_adcp = open(filename_1, 'wb')
    pickle.dump(adcp_dicts, out_adcp)

    filename_2 = '/array/home/116822s/tidal_data/stats_test/FVCOM_data1.pkl'
    out_fvc = open(filename_2, 'wb')
    pickle.dump(fvc_dicts, out_fvc)

    out_adcp.close()
    out_fvc.close()

    # start getting the harmonic data
    for i in np.arange(len(fvc_dicts)):
        order = ['M2','S2','N2','K2','K1','O1','P1','Q1']

        print 'Getting harmonic data for new site'

        coef = ut_solv(time, ua[:, ii], va[:, ii], uvnodell[ii, 1],
                       cnstit=order, rmin=Rayleigh[0], notrend=True, method='ols',
                       nodiagn=True, linci=True, conf_int=True,
                       ordercnstit='frq')

        # create time array for output time series
        start = adcp_dicts[i]['start']
        step = adcp_dicts[i]['step']
        num_steps = adcp_dicts[i]['pts'].size

        series = start + np.arange(num_steps) * step
        for v, vv in enumerate(series):
            series[v] = datetime2matlabdn(vv)

        t = series.astype(float)

        # reconstruct the time series using adcp times
        time_series = np.asarray(ut_reconstr(t, coef))
        time_series = np.sqrt(time_series[0]**2 + time_series[1]**2)
        # ASK WESLEY WHAT THIS RETURNS, my idea might not be correct

        print np.where((time_series[1] - time_series[0]) != 0)

        fvc_dicts[i]['start'] = start
        fvc_dicts[i]['end'] = adcp_dicts[i]['end']
        fvc_dicts[i]['step'] = step
        fvc_dicts[i]['pts'] = time_series

    # save harmonic data    
    filename_3 = '/array/home/116822s/tidal_data/stats_test/hindcast_1.pkl'
    out_hind = open(filename_3, 'wb')
    pickle.dump(fvc_dicts, out_hind)
    out_hind.close()

    print 'Done!'
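
The key pattern in this example is the ut_solv / ut_reconstr round trip: fit coefficients on the model time base, then rebuild the series on the observation time base. A stripped-down, hedged sketch of just that step (t_model, u_model, v_model, lat and t_obs are placeholder arrays; ut_solv and ut_reconstr imported as in these examples):

import numpy as np

# fit harmonic coefficients on the model times (MATLAB datenum)
coef = ut_solv(t_model, u_model, v_model, lat,
               cnstit=['M2','S2','N2','K2','K1','O1','P1','Q1'],
               notrend=True, rmin=0.95, method='ols',
               nodiagn=True, linci=True, conf_int=True,
               ordercnstit='frq')

# reconstruct on the observation times and form a speed, mirroring the
# time_series computation above
rec = np.asarray(ut_reconstr(t_obs, coef))
speed_hindcast = np.sqrt(rec[0]**2 + rec[1]**2)
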
Example #12
def main(debug=False):
    if debug:
        #datafiles = ['/array/data1/rkarsten/dncoarse_bctest_old/output/dn_coarse_0001.nc',
        #            '/array/data1/rkarsten/dncoarse_bctest/output/dn_coarse_0001.nc']
        fvFiles = ['/home/wesley/ncfiles/smallcape_force_0001.nc']
    else:

        #fvFile = '/EcoII/EcoEII_server_data_tree/data/simulated/FVCOM/dngrid/june_2013_3D/output/'
        #fvFiles = ['/EcoII/EcoEII_server_data_tree/data/simulated/FVCOM/dngrid/june_2013_3D/output/']
        fvFiles = ['/EcoII/EcoEII_server_data_tree/workspace/simulated/FVCOM/dngrid/june_2013_3D/output/']
        #adcpFile = '/EcoII/EcoEII_server_data_tree/data/observed/GP/ADCP/Flow_GP-130620-BPa_avg5.mat'
        #adcpFile = '/EcoII/EcoEII_server_data_tree/data/observed/GP/ADCP/Flow_GP-130620-BPb_avg5.mat'
        adcpFiles = ['/EcoII/EcoEII_server_data_tree/data/observed/GP/ADCP/Flow_GP-130620-BPa_avg5.mat',
         '/EcoII/EcoEII_server_data_tree/data/observed/GP/ADCP/Flow_GP-130620-BPb_avg5.mat']
        fvdebug = '/EcoII/EcoEII_server_data_tree/workspace/simulated/FVCOM/dngrid/june_2013_3D/output/dngrid_0001_week2.nc'

    fvdebugData = FVCOM(fvdebug)
    saveName = 'june_2013_3D_station.p'
    Name = 'june_2013_3D_station'
    Struct = {}

    for fvFile in fvFiles:

        print fvFile
        fvData = station(fvFile)

        struct = np.array([])
        for adcpFile in adcpFiles:
            print adcpFile
            adcpData = ADCP(adcpFile)
            lonlat = np.array([adcpData.lon[0], adcpData.lat[0]]).T
            #lonlat = np.array([adcpData.x[0], adcpData.y[0]]).T
            #ind = closest_point(lonlat, fvData.lon, fvData.lat)
            newind = closest_point(lonlat, fvdebugData.lonc, fvdebugData.latc)
            #ind = closest_point(lonlat, fvData.x, fvData.y)
            new = np.array([fvdebugData.xc[newind], fvdebugData.yc[newind]])
            ind = closest_point(new.T, fvData.x, fvData.y)

            print ind
            print adcpData.mtime.shape
            print adcpData.ua.shape
            print adcpData.va.shape
            print adcpData.surf.shape

            adcpVelCoef = ut_solv(adcpData.mtime, adcpData.ua,
                            adcpData.va, adcpData.lat[0],
                            cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

            adcpElevCoef = ut_solv(adcpData.mtime, adcpData.surf,
                            [], adcpData.lat[0],
                            cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

            adcpName = adcpFile.split('/')[-1].split('.')[0]
            #WB_COMMENT: Doesn't currently work
            obs = pd.DataFrame({'u':adcpData.ua, 'v':adcpData.va, 'elev':adcpData.surf})

            print fvData.time.shape
            print fvData.ua[:, ind].shape
            print fvData.va[:, ind].shape
            print fvData.lat[ind].shape

            fvVelCoef = ut_solv(fvData.time, fvData.ua[:, ind].flatten(),
                                fvData.va[:, ind].flatten(),
                        adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                        method='ols', nodiagn=True, linci=True, conf_int=True)

            print fvData.elev[:, ind].shape
            fvElevCoef = ut_solv(fvData.time, fvData.elev[:, ind].flatten(), [],
                        adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                        method='ols', nodiagn=True, linci=True, conf_int=True)

            mod = pd.DataFrame({'ua':fvData.ua[:, ind].flatten(),
                                'va':fvData.va[:, ind].flatten(),
                                'elev':fvData.elev[:, ind].flatten()})


            obs_loc = {'name': adcpName, 'type':'ADCP', 'lat':fvdebugData.lat[newind],
                    'lon':fvdebugData.lon[newind], 'obs_timeseries':obs,
                    'mod_timeseries':mod, 'obs_time':adcpData.mtime,
                    'mod_time':fvData.time, 'vel_obs_harmonics':adcpVelCoef,
                    'elev_obs_harmonics':adcpElevCoef,
                    'vel_mod_harmonics':fvVelCoef, 'elev_mod_harmonics':fvElevCoef}

            struct = np.hstack((struct, obs_loc))

        Struct[Name] = struct

    if debug:
        pickle.dump(Struct, open("structADCP.p", "wb"))

    pickle.dump(Struct, open(saveName, "wb"))
    return Struct
Example #13
    def validate_harmonics(self, filename=[], save_csv=False,
                           debug=False, debug_plot=False):
        """
        This method computes and store in a csv file the error in %
        for each component of the harmonic analysis (i.e. *_error.csv).     

        Options:
        ------
          - filename: file name of the .csv file to be saved, string.
          - save_csv: will save both observed and modeled harmonic
                      coefficients into *.csv files (i.e. *_harmo_coef.csv) 
        """
        #User input
        if filename==[]:
            filename = input('Enter filename (string) for csv file: ')
            filename = str(filename)


        #Harmonic analysis over matching time
        if self.Variables._obstype=='adcp':
            time = self.Variables.struct['obs_time']
            lat = self.Variables.struct['lat']
            ua =  self.Variables.struct['obs_timeseries']['ua'][:]
            va =  self.Variables.struct['obs_timeseries']['va'][:]
            el =  self.Variables.struct['obs_timeseries']['elev'] [:]          
            
            self.Variables.obs.velCoef = ut_solv(time, ua, va, lat,
                                         #cnstit=ut_constits, rmin=0.95, notrend=True,
                                         cnstit='auto', rmin=0.95, notrend=True,
                                         method='ols', nodiagn=True, linci=True,
                                         conf_int=True)
            

            self.Variables.obs.elCoef = ut_solv(time, el, [], lat,
                                        #cnstit=ut_constits, rmin=0.95, notrend=True,
                                        cnstit='auto', rmin=0.95, notrend=True,
                                        method='ols', nodiagn=True, linci=True,
                                        conf_int=True)

        elif self.Variables._obstype=='tidegauge':
            time = self.Variables.struct['obs_time']
            lat = self.Variables.struct['lat']
            el =  self.Variables.struct['obs_timeseries']['elev'] [:]
 
            self.Variables.obs.elCoef = ut_solv(time, el, [], lat,
                                        #cnstit=ut_constits, notrend=True,
                                        cnstit='auto', notrend=True,
                                        rmin=0.95, method='ols', nodiagn=True,
                                        #linci=True, ordercnstit='frq')
                                        linci=True, conf_int=True)
        else:
            print "--This type of observations is not supported---"
            sys.exit()

        if self.Variables._simtype=='fvcom':
            time = self.Variables.struct['mod_time']
            lat = self.Variables.struct['lat']
            el =  self.Variables.struct['mod_timeseries']['elev'][:]           
            
            self.Variables.sim.elCoef = ut_solv(time, el, [], lat,
                             #cnstit=ut_constits, rmin=0.95, notrend=True,
                             cnstit='auto', rmin=0.95, notrend=True,
                             method='ols', nodiagn=True, linci=True, conf_int=True)
            if self.Variables._obstype=='adcp':
                ua =  self.Variables.struct['mod_timeseries']['ua'][:]
                va =  self.Variables.struct['mod_timeseries']['va'][:]
                self.Variables.sim.velCoef = ut_solv(time, ua, va, lat,
                                  #cnstit=ut_constits, rmin=0.95, notrend=True,
                                  cnstit='auto', rmin=0.95, notrend=True,
                                  method='ols', nodiagn=True, linci=True, conf_int=True)

        elif self.Variables._simtype=='station':
            time = self.Variables.struct['mod_time']
            lat = self.Variables.struct['lat']
            el = self.Variables.struct['mod_timeseries']['elev'][:]

            self.Variables.sim.elCoef = ut_solv(time, el, [], lat,
                             #cnstit=ut_constits, rmin=0.95, notrend=True,
                             cnstit='auto', rmin=0.95, notrend=True,
                             method='ols', nodiagn=True, linci=True, conf_int=True)
            if self.Variables._obstype=='adcp':
                ua = self.Variables.struct['mod_timeseries']['ua'][:]
                va = self.Variables.struct['mod_timeseries']['va'][:]
                self.Variables.sim.velCoef = ut_solv(time, ua, va, lat,
                                  #cnstit=ut_constits, rmin=0.95, notrend=True,
                                  cnstit='auto', rmin=0.95, notrend=True,
                                  method='ols', nodiagn=True, linci=True, conf_int=True)

        #find matching and non-matching coef
        matchElCoef = []
        matchElCoefInd = []
        for i1, key1 in enumerate(self.Variables.sim.elCoef['name']):
            for i2, key2 in enumerate(self.Variables.obs.elCoef['name']):
                if key1 == key2:
                   matchElCoefInd.append((i1,i2))
                   matchElCoef.append(key1)
        matchElCoefInd=np.array(matchElCoefInd)
        noMatchElCoef = np.delete(self.Variables.sim.elCoef['name'],
                                  matchElCoefInd[:,0])
        np.hstack((noMatchElCoef,np.delete(self.Variables.obs.elCoef['name'],
                   matchElCoefInd[:,1]) ))

        matchVelCoef = []
        matchVelCoefInd = []
        try:
            for i1, key1 in enumerate(self.Variables.sim.velCoef['name']):
                for i2, key2 in enumerate(self.Variables.obs.velCoef['name']):
                    if key1 == key2:
                        matchVelCoefInd.append((i1,i2))
                        matchVelCoef.append(key1)
            matchVelCoefInd=np.array(matchVelCoefInd)
            noMatchVelCoef = np.delete(self.Variables.sim.velCoef['name'],
                                       matchVelCoefInd[:,0])
            np.hstack((noMatchVelCoef,np.delete(self.Variables.obs.velCoef['name'],
                       matchVelCoefInd[:,1]) ))
        except AttributeError:
            pass


        #Compare obs. vs. sim. elevation harmo coef
        data = {}
        columns = ['A', 'g', 'A_ci', 'g_ci']

        #Store harmonics in csv files 
        if save_csv:
            #observed elevation coefs
            for key in columns:
                data[key] = self.Variables.obs.elCoef[key]           
            table = pd.DataFrame(data=data, index=self.Variables.obs.elCoef['name'],
                                 columns=columns)
            ##export as .csv file
            out_file = '{}_obs_el_harmo_coef.csv'.format(filename)
            table.to_csv(out_file)            
            data = {}

            #modeled elevation coefs
            for key in columns:
                data[key] = self.Variables.sim.elCoef[key]           
            table = pd.DataFrame(data=data, index=self.Variables.sim.elCoef['name'],
                                 columns=columns)
            ##export as .csv file
            out_file = '{}_sim_el_harmo_coef.csv'.format(filename)
            table.to_csv(out_file)            
            data = {}

        ##error in %
        if not matchElCoef==[]:
            for key in columns:
                b=self.Variables.sim.elCoef[key][matchElCoefInd[:,0]]
                a=self.Variables.obs.elCoef[key][matchElCoefInd[:,1]]
                err = abs((a-b)/a) * 100.0
                data[key] = err

            ##create table
            table = pd.DataFrame(data=data, index=matchElCoef, columns=columns)        
            ##export as .csv file
            out_file = '{}_el_harmo_error.csv'.format(filename)
            table.to_csv(out_file)
            ##print non-matching coefs
            if not noMatchElCoef.shape[0]==0:
                print "Non-matching harmonic coefficients for elevation: ", noMatchElCoef
        else:
            print "-No matching harmonic coefficients for elevation-" 

        #Compare obs. vs. sim. velocity harmo coef
        data = {}
        columns = ['Lsmaj', 'g', 'theta_ci', 'Lsmin_ci',
                   'Lsmaj_ci', 'theta', 'g_ci']
 
        #Store harmonics in csv files 
        if save_csv:
            #observed velocity coefs
            for key in columns:
                data[key] = self.Variables.obs.velCoef[key]          
            table = pd.DataFrame(data=data, index=self.Variables.obs.velCoef['name'],
                                 columns=columns)
            ##export as .csv file
            out_file = '{}_obs_velo_harmo_coef.csv'.format(filename)
            table.to_csv(out_file)            
            data = {}

            #modeled velocity coefs
            for key in columns:
                data[key] = self.Variables.sim.velCoef[key]           
            table = pd.DataFrame(data=data, index=self.Variables.sim.velCoef['name'],
                                 columns=columns)
            ##export as .csv file
            out_file = '{}_sim_velo_harmo_coef.csv'.format(filename)
            table.to_csv(out_file)            
            data = {}

        ##error in %
        if not matchVelCoef==[]:
            for key in columns:
                b=self.Variables.sim.velCoef[key][matchVelCoefInd[:,0]]
                a=self.Variables.obs.velCoef[key][matchVelCoefInd[:,1]]
                err = abs((a-b)/a) * 100.0
                data[key] = err

            ##create table
            table = pd.DataFrame(data=data, index=matchVelCoef, columns=columns)        
            ##export as .csv file
            out_file = '{}_velo_harmo_error.csv'.format(filename)
            table.to_csv(out_file)
            ##print non-matching coefs
            if not noMatchVelCoef.shape[0]==0:
                print "Non-matching harmonic coefficients for velocity: ", noMatchVelCoef
        else:
            print "-No matching harmonic coefficients for velocity-"      
Example #14
def compareUV(data):
    '''
    Does a comprehensive validation process between modeled and observed
    data on the following:
        Current speed
        Current direction
        Harmonic constituents (for height and speed)

    Outputs a list of important statistics for each variable, calculated
    using the TidalStats class
    '''
    # take data from input dictionary
    mod_time = data['mod_time']
    obs_time = data['obs_time']
    print 'Loading mod timeseries'
    mod_u_all = data['mod_timeseries']['u'][:, :, 0]
    mod_v_all = data['mod_timeseries']['v'][:, :, 0]
    mod_el = data['mod_timeseries']['elev']
    print 'Loading obs timeseries'
    obs_u_all = data['obs_timeseries']['u']
    obs_v_all = data['obs_timeseries']['v']
    obs_el = data['obs_timeseries']['elev']
    v_obs_harm = data['vel_obs_harmonics']
    el_mod_harm = data['elev_mod_harmonics']
    el_obs_harm = data['elev_obs_harmonics']
    bins = data['obs_timeseries']['bins']
    sig = -data['mod_timeseries']['siglay'][:, 0]

    # for some reason, the siglayers are repeated within siglay
    # this bit of code will pick out only one of those repetitions
    siglay = []
    for i, v in enumerate(sig):
        siglay.append(v)
        if (sig[i + 1] < v):
            break
    siglay = np.asarray(siglay)

    print 'siglay: {}'.format(siglay)
    print 'mod shape: {}'.format(mod_u_all.shape)
    print 'obs shape: {}'.format(obs_u_all.shape)

    # use depth interpolation to get a single timeseries
    print 'Performing depth interpolation'
    mod_depth = mod_el + np.mean(obs_el)
    (mod_u, obs_u) = depthFromSurf(mod_u_all, mod_depth, siglay,
                                   obs_u_all, obs_el, bins)
    (mod_v, obs_v) = depthFromSurf(mod_v_all, mod_depth, siglay,
                                   obs_v_all, obs_el, bins)
    print 'Depth interpolation completed'

    # create new coefs based on depth interpolated timeseries
    v_mod_harm = ut_solv(mod_time, mod_u, mod_v,
                         data['lat'], cnstit='auto',
                         rmin=0.95, notrend=True, method='ols', nodiagn=True,
                         linci=True, conf_int=True)

    # convert times to datetime
    mod_dt, obs_dt = [], []
    for i in mod_time:
        mod_dt.append(dn2dt(i))
    for j in obs_time:
        obs_dt.append(dn2dt(j))

    # put data into a useful format
    mod_spd = np.sqrt(mod_u**2 + mod_v**2)
    obs_spd = np.sqrt(obs_u**2 + obs_v**2)
    mod_dir = np.arctan2(mod_v, mod_u) * 180 / np.pi
    obs_dir = np.arctan2(obs_v, obs_u) * 180 / np.pi
    obs_el = obs_el - np.mean(obs_el)

    # check if the modeled data lines up with the observed data
    if (mod_time[-1] < obs_time[0] or obs_time[-1] < mod_time[0]):

        pred_uv = ut_reconstr(obs_time, v_mod_harm)
        pred_uv = np.asarray(pred_uv)
        pred_h = ut_reconstr(obs_time, el_mod_harm)
        pred_h = np.asarray(pred_h)

        # redo speed and direction and set interpolated variables
        mod_sp_int = np.sqrt(pred_uv[0]**2 + pred_uv[1]**2)
        mod_ve_int = mod_sp_int * np.sign(pred_uv[1])
        mod_dr_int = np.arctan2(pred_uv[1], pred_uv[0]) * 180 / np.pi
        mod_el_int = pred_h[0]
        mod_u_int = pred_uv[0]
        mod_v_int = pred_uv[1]
        obs_sp_int = obs_spd
        obs_ve_int = obs_spd * np.sign(obs_v)
        obs_dr_int = obs_dir
        obs_el_int = obs_el
        obs_u_int = obs_u
        obs_v_int = obs_v
        step_int = obs_dt[1] - obs_dt[0]
        start_int = obs_dt[0]

    else:
        # interpolate the data onto a common time step for each data type
        # elevation
        (mod_el_int, obs_el_int, step_int, start_int) = \
            smooth(mod_el, mod_dt, obs_el, obs_dt)

        # speed
        (mod_sp_int, obs_sp_int, step_int, start_int) = \
            smooth(mod_spd, mod_dt, obs_spd, obs_dt)

        # direction
        (mod_dr_int, obs_dr_int, step_int, start_int) = \
            smooth(mod_dir, mod_dt, obs_dir, obs_dt)

        # u velocity
        (mod_u_int, obs_u_int, step_int, start_int) = \
            smooth(mod_u, mod_dt, obs_u, obs_dt)

        # v velocity
        (mod_v_int, obs_v_int, step_int, start_int) = \
            smooth(mod_v, mod_dt, obs_v, obs_dt)

        # velocity i.e. signed speed
        (mod_ve_int, obs_ve_int, step_int, start_int) = \
            smooth(mod_spd * np.sign(mod_v), mod_dt,
                   obs_spd * np.sign(obs_v), obs_dt)
    '''
    # separate into ebb and flow
    mod_dir_n = get_DirFromN(mod_u_int, mod_v_int)
    obs_dir_n = get_DirFromN(obs_u_int, mod_v_int)
    mod_signed_s, mod_PA = sign_speed(mod_u_int, mod_v_int, mod_sp_int,
				      mod_dr_int, 0)
    obs_signed_s, obs_PA = sign_speed(obs_u_int, obs_v_int, obs_sp_int,
				      obs_dr_int, 0)
    print mod_signed_s[:20], mod_PA[:20]
    print obs_signed_s[:20], obs_PA[:20]
    '''

    # remove directions where velocities are small
    MIN_VEL = 0.5
    for i in np.arange(obs_sp_int.size):
        if (obs_sp_int[i] < MIN_VEL):
            obs_dr_int[i] = np.nan
        if (mod_sp_int[i] < MIN_VEL):
            mod_dr_int[i] = np.nan

    # get stats for each tidal variable
    elev_suite = tidalSuite(mod_el_int, obs_el_int, step_int, start_int,
                            type='elevation', plot=True)
    speed_suite = tidalSuite(mod_sp_int, obs_sp_int, step_int, start_int,
                             type='speed', plot=True)
    dir_suite = tidalSuite(mod_dr_int, obs_dr_int, step_int, start_int,
                           type='direction', plot=False)
    u_suite = tidalSuite(mod_u_int, obs_u_int, step_int, start_int,
                         type='u velocity', plot=False)
    v_suite = tidalSuite(mod_v_int, obs_v_int, step_int, start_int,
                         type='v velocity', plot=False)
    vel_suite = tidalSuite(mod_ve_int, obs_ve_int, step_int, start_int,
                           type='velocity', plot=False)
    #ebb_suite = tidalSuite(mod_ebb, obs_ebb, step_int, start_int,
	#		    type='ebb', plot=True)
    #flo_suite = tidalSuite(mod_flo, obs_flo, step_int, start_int,
	#		    type='flow', plot=True)
    # output statistics in useful format
    return (elev_suite, speed_suite, dir_suite, u_suite, v_suite, vel_suite)
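
A hedged sketch of feeding one entry of a validation structure into compareUV; the pickle name follows Example #12, and the entry is assumed to carry the 3D fields (u, v, siglay, bins) that compareUV indexes:

import pickle

Struct = pickle.load(open('june_2013_3D_station.p', 'rb'))
site = Struct['june_2013_3D_station'][0]   # one obs_loc dictionary

(elev_suite, speed_suite, dir_suite,
 u_suite, v_suite, vel_suite) = compareUV(site)
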
Example #15
    def Harmonic_analysis_at_point(
        self, station, time_ind=[], t_start=[], t_end=[], elevation=True, velocity=False, debug=False, **kwarg
    ):
        """
        Description:
        -----------
        This function performs a harmonic analysis on the sea surface elevation
        time series or the velocity components timeseries.

        Inputs:
        ------
          - station = either station index (interger) or name (string)

        Outputs:
        -------
          - harmo = harmonic coefficients, dictionary

        Keywords:
        --------
          - time_ind = time indices to work in, list of integers
          - t_start = start time, as a string ('yyyy-mm-ddThh:mm:ss'),
                     or time index as an integer
          - t_end = end time, as a string ('yyyy-mm-ddThh:mm:ss'),
                    or time index as an integer
          - elevation=True means that ut_solv will be done for elevation.
          - velocity=True means that ut_solv will be done for velocity.

        Options:
        -------
        Options are the same as for ut_solv, which are shown below with
        their default values:
            conf_int=True; cnstit='auto'; notrend=0; prefilt=[]; nodsatlint=0;
            nodsatnone=0; gwchlint=0; gwchnone=0; infer=[]; inferaprx=0;
            rmin=1; method='cauchy'; tunrdn=1; linci=0; white=0; nrlzn=200;
            lsfrqosmp=1; nodiagn=0; diagnplots=0; diagnminsnr=2;
            ordercnstit=[]; runtimedisp='yyy'

        Notes:
        -----
        For more detailed information about ut_solv, please see
        https://github.com/wesleybowman/UTide

        """
        debug = debug or self._debug

        # Search for the station
        index = self.search_index(station)

        argtime = []
        if not time_ind == []:
            argtime = time_ind
        elif not t_start == []:
            if type(t_start) == str:
                argtime = time_to_index(t_start, t_end, self._var.matlabTime, debug=debug)
            else:
                argtime = arange(t_start, t_end)

        if velocity:
            time = self._var.matlabTime[:]
            u = self._var.ua[:, index]
            v = self._var.va[:, index]

            if not argtime == []:
                time = time[argtime[:]]
                u = u[argtime[:]]
                v = v[argtime[:]]

            lat = self._grid.lat[index]
            harmo = ut_solv(time, u, v, lat, **kwarg)

        if elevation:
            time = self._var.matlabTime[:]
            el = self._var.el[:, index]

            if not argtime == []:
                time = time[argtime[:]]
                el = el[argtime[:]]

            lat = self._grid.lat[index]
            harmo = ut_solv(time, el, [], lat, **kwarg)
            # Write meta-data only if computed over all the elements

            return harmo
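
A minimal sketch for this station variant, assuming a Station-like object exposing search_index and the _var/_grid attributes used above (the station name and file path are placeholders):

# Hypothetical station object; the name passed to search_index stands in
# for a real station identifier known to the dataset.
st = Station('/path/to/station_output.nc')

el_coef = st.Harmonic_analysis_at_point(
    'GP_site_a',
    elevation=True, velocity=False,
    cnstit='auto', notrend=True, rmin=0.95,
    method='ols', nodiagn=True, linci=True, conf_int=True)
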
Example #16
def main(fvFiles, adcpFiles, tideFiles, debug=False):

    fvdebugData = FVCOM(fvdebug)
    saveName = 'validationStruct.p'
    #Name = 'june_2013_3D_station'
    Struct = {}

    for fvFile in fvFiles:

        print fvFile
        fvData = station(fvFile)

        struct = np.array([])
        for adcpFile in adcpFiles:
            print adcpFile
            adcpData = ADCP(adcpFile)
            lonlat = np.array([adcpData.lon[0], adcpData.lat[0]]).T
            #lonlat = np.array([adcpData.x[0], adcpData.y[0]]).T
            #ind = closest_point(lonlat, fvData.lon, fvData.lat)
            newind = closest_point(lonlat, fvdebugData.lonc, fvdebugData.latc)
            #ind = closest_point(lonlat, fvData.x, fvData.y)
            new = np.array([fvdebugData.xc[newind], fvdebugData.yc[newind]])
            ind = closest_point(new.T, fvData.x, fvData.y)

            print ind
            print adcpData.mtime.shape
            print adcpData.ua.shape
            print adcpData.va.shape
            print adcpData.surf.shape

            adcpVelCoef = ut_solv(adcpData.mtime, adcpData.ua,
                            adcpData.va, adcpData.lat[0],
                            cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

            adcpElevCoef = ut_solv(adcpData.mtime, adcpData.surf,
                            [], adcpData.lat[0],
                            cnstit='auto', rmin=0.95, notrend=True,
                            method='ols', nodiagn=True, linci=True, conf_int=True)

            adcpName = adcpFile.split('/')[-1].split('.')[0]
            #WB_COMMENT: Doesn't currently work
            obs = pd.DataFrame({'u':adcpData.ua, 'v':adcpData.va, 'elev':adcpData.surf})

            print fvData.time.shape
            print fvData.ua[:, ind].shape
            print fvData.va[:, ind].shape
            print fvData.lat[ind].shape

            fvVelCoef = ut_solv(fvData.time, fvData.ua[:, ind].flatten(),
                                fvData.va[:, ind].flatten(),
                        adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                        method='ols', nodiagn=True, linci=True, conf_int=True)

            print fvData.elev[:, ind].shape
            fvElevCoef = ut_solv(fvData.time, fvData.elev[:, ind].flatten(), [],
                        adcpData.lat[0], cnstit='auto', rmin=0.95, notrend=True,
                        method='ols', nodiagn=True, linci=True, conf_int=True)

            mod = pd.DataFrame({'ua':fvData.ua[:, ind].flatten(),
                                'va':fvData.va[:, ind].flatten(),
                                'elev':fvData.elev[:, ind].flatten()})


            obs_loc = {'name': adcpName, 'type':'ADCP', 'lat':fvdebugData.lat[newind],
                    'lon':fvdebugData.lon[newind], 'obs_timeseries':obs,
                    'mod_timeseries':mod, 'obs_time':adcpData.mtime,
                    'mod_time':fvData.time, 'vel_obs_harmonics':adcpVelCoef,
                    'elev_obs_harmonics':adcpElevCoef,
                    'vel_mod_harmonics':fvVelCoef, 'elev_mod_harmonics':fvElevCoef}

            struct = np.hstack((struct, obs_loc))

        Struct[Name] = struct

    if debug:
        pickle.dump(Struct, open("structADCP.p", "wb"))

    pickle.dump(Struct, open(saveName, "wb"))
    return Struct
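A possible driver call for main; the file paths below are placeholders, not taken from the original code, and tideFiles is accepted but not used in the snippet shown:

    fvFiles = ['/path/to/station_run.nc']      # placeholder
    adcpFiles = ['/path/to/adcp_record.mat']   # placeholder
    tideFiles = []                             # not used by the body above
    Struct = main(fvFiles, adcpFiles, tideFiles, debug=True)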
Example #17
0
    def __init__(self, observed, simulated, debug=False):
        if debug: print "..variables.."
        self.obs = observed.Variables
        self.sim = simulated.Variables
        self.struct = np.array([])
        #harmonic constituents to be evaluated
        ut_constits = ['M2','S2','N2','K2','K1','O1','P1','Q1']

        #Check if times coincide
        obsMax = self.obs.matlabTime.max()
        obsMin = self.obs.matlabTime.min()
        simMax = self.sim.matlabTime.max()
        simMin = self.sim.matlabTime.min()
        absMin = max(obsMin, simMin)
        absMax = min(obsMax, simMax)
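        # Indices of simulated (C) and observed (c) samples falling inside the overlap window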
        A = set(np.where(self.sim.matlabTime[:] >= absMin)[0].tolist()) 
        B = set(np.where(self.sim.matlabTime[:] <= absMax)[0].tolist())
        C = list(A.intersection(B))
        a = set(np.where(self.obs.matlabTime[:] >= absMin)[0].tolist()) 
        b = set(np.where(self.obs.matlabTime[:] <= absMax)[0].tolist())
        c = list(a.intersection(b))        
        if len(C) == 0:
            print "---Simulation and observation periods do not overlap---"
            sys.exit()

        #Check what kind of simulated data it is
        if simulated.__module__=='pyseidon.stationClass.stationClass':
            #Find closest point to ADCP
            ind = closest_point([self.obs.lon], [self.obs.lat],
                                simulated.Grid.lon[:],
                                simulated.Grid.lat[:])
            nameSite = ''.join(simulated.Grid.name[ind,:][0,:])
            print "Station site: " + nameSite
            el = self.sim.el[:, ind].flatten()
            ua = self.sim.ua[:, ind].flatten()
            va = self.sim.va[:, ind].flatten()
            if self.sim._3D:
                u = np.squeeze(self.sim.u[:, :,ind])
                v = np.squeeze(self.sim.v[:, :,ind])
                sig = np.squeeze(simulated.Grid.siglay[:, ind])
     
            #Harmonic analysis over matching time
            velCoef = ut_solv(self.sim.matlabTime[C],
                              ua[C], va[C],
                              simulated.Grid.lat[ind],
                              cnstit=ut_constits, rmin=0.95, notrend=True,
                              method='ols', nodiagn=True, linci=True, conf_int=True)

            elCoef = ut_solv(self.sim.matlabTime[C],
                             el[C], [],
                             simulated.Grid.lat[ind],
                             cnstit=ut_constits, rmin=0.95, notrend=True,
                             method='ols', nodiagn=True, linci=True, conf_int=True)
        #Alternative simulation type
        elif simulated.__module__=='pyseidon.fvcomClass.fvcomClass':
            #Interpolation at measurement location
            el=simulated.Util2D.interpolation_at_point(self.sim.el,
                                                       self.obs.lon, self.obs.lat)
            ua=simulated.Util2D.interpolation_at_point(self.sim.ua,
                                                       self.obs.lon, self.obs.lat)
            va=simulated.Util2D.interpolation_at_point(self.sim.va,
                                                       self.obs.lon, self.obs.lat)
            if self.sim._3D:
                u=simulated.Util3D.interpolation_at_point(self.sim.u,
                                                          self.obs.lon, self.obs.lat)
                v=simulated.Util3D.interpolation_at_point(self.sim.v,
                                                          self.obs.lon, self.obs.lat)
                sig=simulated.Util3D.interpolation_at_point(simulated.Grid.siglay,
                                                            self.obs.lon, self.obs.lat)
            #Harmonic analysis
            velCoef = ut_solv(self.sim.matlabTime[C],
                              ua[C], va[C], self.obs.lat,
                              cnstit=ut_constits, rmin=0.95, notrend=True,
                              method='ols', nodiagn=True, linci=True, conf_int=True)

            elCoef = ut_solv(self.sim.matlabTime[C],
                             el[C], [], self.obs.lat,
                             cnstit=ut_constits, rmin=0.95, notrend=True,
                             method='ols', nodiagn=True, linci=True, conf_int=True)

        else:
            print "-This type of simulations is not supported yet-"
            sys.exit()

        #Store in dict structure for compatibility purposes
        if not self.sim._3D:
            sim_mod={'ua':ua[:],
                     'va':va[:],
                     'elev':el[:]}
        else:
            sim_mod={'ua':ua[:],
                     'va':va[:],
                     'elev':el[:],
                     'u':u[:],
                     'v':v[:],
                     'siglay':sig[:]}
             

        #Check what kind of observed data it is
        if observed.__module__=='pyseidon.adcpClass.adcpClass':
            obstype='ADCP'
            #Harmonic analysis
            self.obs.velCoef = ut_solv(self.obs.matlabTime[c], self.obs.ua[c],
                                       self.obs.va[c], self.obs.lat,
                                       cnstit=ut_constits, rmin=0.95, notrend=True,
                                       method='ols', nodiagn=True, linci=True, conf_int=True)
            

            self.obs.elCoef = ut_solv(self.obs.matlabTime[c], self.obs.surf[c],
                                      [], self.obs.lat,
                                      cnstit=ut_constits, rmin=0.95, notrend=True,
                                      method='ols', nodiagn=True, linci=True, conf_int=True)

            #Store in dict structure for compatibility purposes
            obs_mod={'ua':self.obs.ua,
                     'va':self.obs.va,
                     'elev':self.obs.surf,
                     'u':self.obs.east_vel,
                     'v':self.obs.north_vel,
                     'bins':self.obs.bins}

        #Alternative measurement type
        elif observed.__module__=='pyseidon.tidegaugeClass.tidegaugeClass':
            obstype='TideGauge'
            self.obs.elCoef = ut_solv(self.obs.matlabTime[c], self.obs.el[c],
                                      [], self.obs.lat,
                                      cnstit=ut_constits, notrend=True,
                                      rmin=0.95, method='ols', nodiagn=True,
                                      #linci=True, ordercnstit='frq')
                                      linci=True, conf_int=True)

            #Store in dict structure for compatibility purposes
            obs_mod = {'data':self.obs.RBR.data, 'elev':self.obs.el}

        else:
            print "-This type of measurements is not supported yet-"
            sys.exit()

        #Store in dict structure for compatibility purposes
        #Common block for 'struct'
        self.struct = {'name': observed.History[0].split(' ')[-1],
                       'type':obstype,
                       'lat':self.obs.lat,
                       'lon':self.obs.lon,
                       'obs_timeseries':obs_mod,
                       'mod_timeseries':sim_mod,
                       'obs_time':self.obs.matlabTime,
                       'mod_time':self.sim.matlabTime,
                       'elev_obs_harmonics':self.obs.elCoef,
                       'elev_mod_harmonics':elCoef}
        #Special blocks for 'struct'
        if self.struct['type']=='ADCP':
            self.struct['vel_obs_harmonics'] = self.obs.velCoef
            self.struct['vel_mod_harmonics'] = velCoef

        if debug: print "..done"
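A hypothetical consumer of the resulting structure; the class name HarmoValidation and the observed/simulated objects are placeholders, while the dictionary keys are the ones set above:

    val = HarmoValidation(adcp_obs, fvcom_sim)   # placeholder names
    obs_el = val.struct['elev_obs_harmonics']
    mod_el = val.struct['elev_mod_harmonics']
    if val.struct['type'] == 'ADCP':
        obs_vel = val.struct['vel_obs_harmonics']
        mod_vel = val.struct['vel_mod_harmonics']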
Example #18
0
newlon[:] = lon
newlat = data.createVariable('lat', 'f8', ('dimx',))
newlat[:] = lat
newlonc = data.createVariable('lonc', 'f8', ('dim',))
newlonc[:] = lonc
newlatc = data.createVariable('latc', 'f8', ('dim',))
newlatc[:] = latc
newh = data.createVariable('h', 'f8', ('dimx',))
newh[:] = h
newtime = data.createVariable('time', 'f8', ('dimtime',))
newtime[:] = time
newtrinodes = data.createVariable('trinodes', 'f8', ('dim','dimtri'))
newtrinodes[:] = trinodes

coef = ut_solv(time, ua[:, 0], va[:, 0], uvnodell[0, 1],
                cnstit='auto', rmin=Rayleigh[0], notrend=True, method='ols',
                nodiagn=True, linci=True, conf_int=True)

#opt = pd.DataFrame(coef['aux']['opt'].items())
opt = coef['aux']['opt']
del coef['aux']['opt']
aux = pd.DataFrame(coef['aux'])
del coef['aux']
c = pd.DataFrame(coef)

cat = pd.concat([c, aux], axis=1)

data.createDimension('dim2', cat['Lsmaj'].shape[0])
data.createDimension('optDim', len(opt))
Lsmaj = data.createVariable('Lsmaj', 'f8', ('dim','dim2'))
Lsmaj_ci = data.createVariable('Lsmaj_ci', 'f8', ('dim','dim2'))
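A likely continuation, not shown in the snippet: copy the element-0 coefficients from the concatenated DataFrame into the newly created netCDF variables (this assumes 'Lsmaj' and 'Lsmaj_ci' are both columns of cat, which a conf_int=True run should provide):

    Lsmaj[0, :] = cat['Lsmaj'].values
    Lsmaj_ci[0, :] = cat['Lsmaj_ci'].values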
Example #19
0
    def harmonics(self, **kwarg):

        self.coef = ut_solv(self.time,
                            (self.data-np.mean(self.data)), [],
                            self.lat, **kwarg)
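A possible call, reusing the option set passed to ut_solv elsewhere in these examples; the instance name tg is a placeholder for an object of the class this method belongs to:

    tg.harmonics(cnstit='auto', rmin=0.95, notrend=True,
                 method='ols', nodiagn=True, linci=True, conf_int=True)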
Example #20
0
        ADCP.index = pd.to_datetime(ADCP.index)

        adcpTime = np.empty(ADCP.index.shape)

        for j, jj in enumerate(ADCP.index):
            adcpTime[j] = datetime2matlabdn(jj)

#        adcpCoef = ut_solv(adcpTime, ADCP['u'].values, ADCP['v'].values, uvnodell[ii, 1],
#                           'auto', Rayleigh[0], 'NoTrend', 'Rmin', 'OLS',
#                           'NoDiagn', 'LinCI')

        order = ['M2','S2','N2','K2','K1','O1','P1','Q1']

        adcpCoef = ut_solv(adcpTime, ADCP['u'].values, ADCP['v'].values,
                           lonclatc[i, 1],
                           cnstit=order, rmin=Rayleigh[0], notrend=True,
                           method='ols', nodiagn=True, linci=True,
                           conf_int=True, ordercnstit='frq')

        adcpAUX = adcpCoef['aux']
        del adcpAUX['opt']
        del adcpCoef['aux']

        adcpAUX = pd.DataFrame(adcpAUX)
        a = pd.DataFrame(adcpCoef)
        size = a.shape[0]
        nameSpacer = pd.DataFrame({'ADCP_Location': np.repeat(adcp.iloc[i, 0],
                                                              size)})

        bottomName = pd.DataFrame({'bottomFriction': np.repeat(bottomfriction,
                                                              size)})
#score=(55-capacity_factor)/10+abs(hc'-40)/10 +(max(distance',2500)-2500)/500;
#score=20*(1-meanP/1e6)+abs(hc'-40)/40 +((max(distance',3000)-3000)/250).^1;
score = 20 * (1 - meanP / 1e6)
turbine_score = score
#Find best location


loci = np.empty(N, dtype=int)  # integer element indices, used as column indices below
for ii in xrange(N):

    loci[ii] = np.argmin(turbine_score)

    # do u_tide analysis at loc
    coef = ut_solv(time, ua[:,loci[ii]], va[:,loci[ii]], uvnodell[loci[ii],1],
                  cnstit='auto', rmin=Rayleigh[0], notrend=True, method='ols',
                  nodiagn=True, linci=True, conf_int=False)


    # Second pass: 1-D analysis of ua alone (empty v component); note this overwrites coef
    coef = ut_solv(time, ua[:,loci[ii]], np.array([]), uvnodell[loci[ii],1],
                  cnstit='auto', rmin=Rayleigh[0], notrend=True, method='ols',
                  nodiagn=True, linci=True, conf_int=False)
#    coef = ut_solv(time, ua[:,loci[ii]], np.array([]), uvnodell[loci[ii],1],
#                  'auto', Rayleigh[0],'NoTrend','Rmin', 'OLS',
#                  'NoDiagn', 'LinCI')

    import pdb; pdb.set_trace()

    # for testing
    if ii == 0:
        pickle.dump(coef, open( "coef.p", "wb"))
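For the testing hook above, the saved coefficients can be read back with a matching pickle.load; a minimal sketch:

    import pickle

    with open("coef.p", "rb") as f:
        coef_check = pickle.load(f)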