Example 1
def main():
    startTime, endTime, modelList, prefixList, workDir = getUsrInp()
    datestring = (startTime.strftime('%Y-%m-%dT%H0000Z') + '_' +
                  endTime.strftime('%Y-%m-%dT%H0000Z'))
    for model in modelList:
        prefixes = prefixList[model]
        for prefix in prefixes:
            fpath = os.path.join(workDir, model, prefix)
            if not os.path.exists(fpath):
                os.makedirs(fpath)
            else:
                files = glob.glob(os.path.join(fpath, '*'))
                for f in files:
                    if os.path.isfile(f): os.remove(f)
                    if os.path.isdir(f): shutil.rmtree(f)
            # Do stations first
            go = getDataFRF.getObs(startTime, endTime)
            gm = getDataFRF.getDataTestBed(startTime, endTime)
            for station in stationList:
                (time, obsStats, modStats, plotList, obsi,
                 modi) = getStats(startTime, endTime, model, prefix, station,
                                  go, gm)
                if time is None:
                    continue
                for param in plotList:
                    obs = obsStats[param][obsi.astype(int)]
                    obs = checkMask(obs, 'observation')
                    mod = modStats[param][modi.astype(int)]
                    mod = checkMask(mod, 'model')
                    ofname = os.path.join(
                        fpath,
                        'CMTB-waveModels_{}_{}_station_{}_{}_{}.png'.format(
                            model, prefix, station, param, datestring))
                    makePlots(ofname, param, time, obs, mod)
            # Now do field
            gm = getDataFRF.getDataTestBed(startTime, endTime)
            for isLocal in [True, False]:
                try:
                    bathy = gm.getModelField('bathymetry',
                                             prefix,
                                             isLocal,
                                             model=model)
                except AssertionError as err:
                    if str(err) == " there's no data":
                        local = ' local' if isLocal else ' regional'
                        print('No ' + model + local + ' ' + prefix +
                              ' field data for ' + datestring)
                        continue
                    else:
                        raise
                for varName in fieldVarList:
                    try:
                        var = gm.getModelField(varName,
                                               prefix,
                                               isLocal,
                                               model=model)
                    except AssertionError as err:
                        if 'variable called is not in file please use' in str(err):
                            continue
                        else:
                            raise
                    for key in var.keys():  # Check data for masks
                        var[key] = checkMask(var[key],
                                             key + ' (in ' + varName + ')')
                    fieldpacket = makeFieldpacket(varName, var, isLocal)
                    grid = 'Local' if isLocal else 'Regional'
                    kwargs = {}
                    if varName == 'waveHs':
                        waveDm = gm.getModelField('waveDm',
                                                  prefix,
                                                  isLocal,
                                                  model=model)['waveDm']
                        kwargs['directions'] = waveDm
                    print('Generating field variable plots for ' + prefix +
                          ' ' + model + ' ' + grid + ' ' + varName)
                    oP.plotSpatialFieldData(bathy,
                                            fieldpacket,
                                            os.path.join(fpath, varName),
                                            isLocal,
                                            model=model,
                                            **kwargs)
                    imList = sorted(glob.glob(fpath + '/' + varName +
                                              '_*.png'))
                    print('Generating field variable gifs for ' + prefix +
                          ' ' + model + ' ' + grid + ' ' + varName)
                    ofname = os.path.join(
                        fpath, 'CMTB-waveModels_{}_{}_{}_{}_{}.gif'.format(
                            model, prefix, grid, varName, datestring))
                    sb.makegif(imList, ofname)
                    for ff in imList:
                        os.remove(ff)
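
The loop above calls a checkMask helper before every plot to guard against masked data. That helper is not shown in these examples; a minimal sketch of what it might look like, assuming it simply fills masked entries with NaN and reports what it touched, is:

import numpy as np

def checkMask(data, name):
    # Hypothetical helper (not the project's actual implementation): fill
    # masked values with NaN so plotting code sees a plain ndarray.
    if isinstance(data, np.ma.MaskedArray) and data.mask.any():
        print('Filling {} masked values in {}'.format(data.mask.sum(), name))
        return data.filled(np.nan)
    return np.asarray(data)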
Example 2
def CMSsimSetup(startTime, inputDict):
    """This Function is the master call for the  data preparation for the Coastal Model
    Test Bed (CMTB) and the CMS wave/FLow model


    NOTE: input to the function is the end of the duration.  All Files are labeled by this convention
    all time stamps otherwise are top of the data collection

    Args:
        startTime (str): this is a string of format YYYY-mm-ddTHH:MM:SSZ (or YYYY-mm-dd) in UTC time
        inputDict (dict): this is a dictionary that is read from the yaml read function

    """
    # begin by setting up input parameters
    if 'simulationDuration' in inputDict:
        timerun = inputDict['simulationDuration']
    else:
        timerun = 24
    if 'pFlag' in inputDict:
        pFlag = inputDict['pFlag']
    else:
        pFlag = True
    assert 'version_prefix' in inputDict, 'Must have "version_prefix" in your input yaml'
    version_prefix = inputDict['version_prefix']
    if 'THREDDS' in inputDict:
        server = inputDict['THREDDS']
    else:
        print('Choosing CHL thredds by default; this may be slower!')
        server = 'CHL'

    TOD = 0  # hour of day simulation to start (UTC)
    path_prefix = inputDict['path_prefix']  # + "/%s/" % version_prefix  # data super directory
    # ______________________________________________________________________________
    # define version parameters
    versionlist = ['HP', 'UNTUNED']
    assert version_prefix in versionlist, 'Please check your version Prefix'
    simFnameBackground = inputDict['gridSIM']  # e.g. /home/spike/cmtb/gridsCMS/CMS-Wave-FRF.sim
    backgroundDepFname = inputDict['gridDEP']  # e.g. /home/spike/cmtb/gridsCMS/CMS-Wave-FRF.dep
    CMSinterp = inputDict.get('CMSinterp', 50)  # max freq bins for the model
    fastModeOn = inputDict.get('fastMode', False)
    # do versioning stuff here
    if version_prefix == 'HP':
        full = False
    elif version_prefix == 'UNTUNED':
        full = False
    else:
        raise NotImplementedError('Check Version Prefix')

    # _______________________________________________________________________________
    # set times
    if isinstance(timerun, str):  # cast before it is used in the timedelta math below
        timerun = int(timerun)
    try:
        d1 = DT.datetime.strptime(
            startTime, '%Y-%m-%dT%H:%M:%SZ') + DT.timedelta(TOD / 24., 0, 0)
        d2 = d1 + DT.timedelta(0, timerun * 3600, 0)
        date_str = d1.strftime('%Y-%m-%dT%H%M%SZ')  # used to be endtime

    except ValueError:
        assert len(startTime) == 10, \
            'Your time does not fit convention, check T/Z and input format'
        d1 = DT.datetime.strptime(startTime, '%Y-%m-%d') + DT.timedelta(
            TOD / 24., 0, 0)
        d2 = d1 + DT.timedelta(0, timerun * 3600, 0)
        date_str = d1.strftime('%Y-%m-%d')  # used to be endtime
        assert int(timerun) >= 24, 'Running simulations with less than 24 hours of simulation time requires end ' \
                                   'time format of type: %Y-%m-%dT%H:%M:%SZ'

    # __________________Make Directories_____________________________________________
    #
    if not os.path.exists(path_prefix + date_str):  # if it doesn't exist
        os.makedirs(path_prefix + date_str)  # make the directory
    if not os.path.exists(path_prefix + date_str + "/figures/"):
        os.makedirs(path_prefix + date_str + "/figures/")

    print "Model Time Start : %s  Model Time End:  %s" % (d1, d2)
    print u"OPERATIONAL files will be place in {0} folder".format(path_prefix +
                                                                  date_str)
    # ______________________________________________________________________________
    # begin model data gathering
    ## _____________WAVES____________________________
    go = getObs(d1, d2, THREDDS=server)  # initialize get observation
    print('_________________\nGetting Wave Data')
    rawspec = go.getWaveSpec(gaugenumber=0)
    assert rawspec is not None, "\n++++\nThere's No Wave data between %s and %s \n++++\n" % (
        d1, d2)

    prepdata = STPD.PrepDataTools()
    # rotate and lower resolution of directional wave spectra
    wavepacket = prepdata.prep_spec(
        rawspec,
        version_prefix,
        datestr=date_str,
        plot=pFlag,
        full=full,
        outputPath=path_prefix,
        CMSinterp=CMSinterp)  # freq bands are max for model
    print "number of wave records %d with %d interpolated points" % (np.shape(
        wavepacket['spec2d'])[0], wavepacket['flag'].sum())

    ## _____________WINDS______________________
    print('_________________\nGetting Wind Data')
    try:
        rawwind = go.getWind(gaugenumber=0)
        # average and rotate winds
        windpacket = prepdata.prep_wind(rawwind, wavepacket['epochtime'])
        # wind height correction
        print('number of wind records %d with %d interpolated points' % (
            np.size(windpacket['time']), sum(windpacket['flag'])))
    except (RuntimeError, TypeError):
        windpacket = None
        print(' NO WIND ON RECORD')

    ## ___________WATER LEVEL__________________
    print('_________________\nGetting Water Level Data')
    try:
        # get water level data
        rawWL = go.getWL()
        # average WL
        WLpacket = prepdata.prep_WL(rawWL, wavepacket['epochtime'])
        print('number of WL records %d, with %d interpolated points' % (
            np.size(WLpacket['time']), sum(WLpacket['flag'])))
    except (RuntimeError, TypeError):
        WLpacket = None
    ### ____________ Get bathy grid from thredds ________________
    gdTB = getDataTestBed(d1, d2)
    # bathy = gdTB.getGridCMS(method='historical')
    bathy = gdTB.getBathyIntegratedTransect(
        method=1)  # , ForcedSurveyDate=ForcedSurveyDate)
    bathy = prepdata.prep_CMSbathy(bathy,
                                   simFnameBackground,
                                   backgroundGrid=backgroundDepFname)
    ### ___________ Create observation locations ________________ # these are cell i/j locations
    gaugelocs = []
    #get gauge nodes x/y
    for gauge in go.gaugelist:
        pos = go.getWaveGaugeLoc(gauge)
        coord = gp.FRFcoord(pos['Lon'], pos['Lat'], coordType='LL')
        i = np.abs(coord['xFRF'] - bathy['xFRF'][::-1]).argmin()
        j = np.abs(coord['yFRF'] - bathy['yFRF'][::-1]).argmin()
        gaugelocs.append([i, j])

    ## begin output
    cmsio = inputOutput.cmsIO()  # initializing the I/o Script writer
    stdFname = os.path.join(path_prefix, date_str,
                            date_str + '.std')  # creating file names now
    simFnameOut = os.path.join(path_prefix, date_str, date_str + '.sim')
    specFname = os.path.join(path_prefix, date_str, date_str + '.eng')
    bathyFname = os.path.join(path_prefix, date_str, date_str + '.dep')

    gridOrigin = (bathy['x0'], bathy['y0'])

    cmsio.writeCMS_std(fname=stdFname,
                       gaugeLocs=gaugelocs,
                       fastMode=fastModeOn)
    cmsio.writeCMS_sim(simFnameOut, date_str, gridOrigin)
    cmsio.writeCMS_spec(specFname,
                        wavePacket=wavepacket,
                        wlPacket=WLpacket,
                        windPacket=windpacket)
    cmsio.writeCMS_dep(bathyFname, depPacket=bathy)
    stio = inputOutput.stwaveIO('')
    inputOutput.write_flags(date_str,
                            path_prefix,
                            wavepacket,
                            windpacket,
                            WLpacket,
                            curpacket=None)
    # remove old output files so they're not appended, cms defaults to appending output files
    try:
        os.remove(os.path.join(path_prefix, date_str, cmsio.waveFname))
        os.remove(os.path.join(path_prefix, date_str, cmsio.selhtFname))
        os.remove(os.path.join(path_prefix, date_str, cmsio.obseFname))
    except OSError:  # there are no files to delete
        pass
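
CMSsimSetup is driven by a yaml-derived dictionary, per its docstring. A minimal, hedged driver sketch, assuming a hypothetical cms_input.yml that supplies the keys the function asserts on or reads (version_prefix, path_prefix, gridSIM, gridDEP):

import yaml

with open('cms_input.yml') as f:  # hypothetical input file name
    inputDict = yaml.safe_load(f)
# startTime follows the documented YYYY-mm-ddTHH:MM:SSZ convention
CMSsimSetup('2018-01-01T00:00:00Z', inputDict)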
Example 3
import sys
sys.path.append('/home/spike/repos')
from getdatatestbed import getDataFRF
import datetime as DT
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import patches as mpatches
from scipy import interpolate
from morphLib import findSandBarAndTrough1D
#######################################################################################################################
meanPickleName = 'meanSandbarProfile.pickle'
profileNumber = 1097  # 1097
crossShoreMax = 1000  # how far do we want to look in cross-shore
minPoints4Survey = 10

go = getDataFRF.getObs(DT.datetime(1975, 5, 1), DT.datetime(2019, 11, 1))
survey = go.getBathyTransectFromNC(forceReturnAll=True)
NorthIdx = survey['profileNumber'] == profileNumber
# isolate data for North-side line only
timesNorth_all = survey['time'][:]
xFRFNorth = survey['xFRF'][:]
elevationNorth = survey['elevation'][:]
surveyNumberNorth = survey['surveyNumber'][:]
# now remove striding issue
timesNorth_all = timesNorth_all[NorthIdx]
xFRFNorth = xFRFNorth[NorthIdx]
elevationNorth = elevationNorth[NorthIdx]
surveyNumberNorth = surveyNumberNorth[NorthIdx]
UniqueSurveyNumbersNorth = np.unique(surveyNumberNorth)
## now simplify the dates of the data
xOut, tOut = np.arange(0, 1800), [
Example 4
def CMSanalyze(startTime, inputDict):
    """This runs the post process script for CMS wave
    will create plots and netcdf files at request

    Args:
        inputDict (dict): this is an input dictionary that was generated with the
            keys from the project input yaml file
        startTime (str): input start time with datestring in format YYYY-mm-ddThh:mm:ssZ

    :return:
        plots in the inputDict['workingDirectory'] location
        netCDF files to the inputDict['netCDFdir'] directory

    """
    # ___________________define Global Variables___________________________________
    if 'pFlag' in inputDict:
        pFlag = inputDict['pFlag']
    else:
        pFlag = True  # will plot true by default
    version_prefix = inputDict['version_prefix']
    path_prefix = inputDict['path_prefix']  # 'data/CMS/%s/' % version_prefix, for organizing data
    simulationDuration = inputDict['simulationDuration']
    if 'netCDFdir' in inputDict:
        Thredds_Base = inputDict['netCDFdir']
    else:
        whoami = check_output('whoami', shell=True).decode().strip()
        Thredds_Base = '/home/%s/thredds_data/' % whoami
    if 'THREDDS' in inputDict:
        server = inputDict['THREDDS']
    else:
        print('Choosing CHL thredds by default; this may be slower!')
        server = 'CHL'

    # _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
    # establishing the resolution of the input datetime
    d1 = DT.datetime.strptime(startTime, '%Y-%m-%dT%H:%M:%SZ')
    d2 = d1 + DT.timedelta(0, simulationDuration * 3600, 0)
    datestring = d1.strftime('%Y-%m-%dT%H%M%SZ')  # a string for file names
    fpath = os.path.join(path_prefix, datestring)
    model = 'CMS'
    # ____________________________________________________________________________
    if version_prefix == 'HP':
        full = False
    elif version_prefix == 'UNTUNED':
        full = False
    else:
        raise NotImplementedError('Check Version Prefix')
    # _____________________________________________________________________________

    print('\nBeginning of Analyze Script\nLooking for file in ' + fpath)
    print('\nData Start: %s  Finish: %s' % (d1, d2))
    print('Analyzing simulation')
    go = getDataFRF.getObs(d1, d2, server)  # setting up get data instance
    prepdata = STPD.PrepDataTools()  # initializing instance for rotation scheme
    cio = cmsIO()  # looks for model output files in folder to analyze
    ######################################################################################################################
    ######################################################################################################################
    ##################################   Load Data Here / Massage Data Here   ############################################
    ######################################################################################################################
    ######################################################################################################################
    t = DT.datetime.now()
    print('Loading files ')
    cio.ReadCMS_ALL(fpath)  # load all files
    stat_packet = cio.stat_packet  # unpack dictionaries from class instance
    obse_packet = cio.obse_Packet
    radStress_packet = cio.radSt_packet
    breaking_packet = cio.break_packet
    dep_pack = cio.dep_Packet
    dep_pack['bathy'] = np.expand_dims(dep_pack['bathy'], axis=0)
    # convert dep_pack to proper dep pack with keys
    wave_pack = cio.wave_Packet
    print('Loaded files in %s' % (DT.datetime.now() - t))
    # correct model output angles from STWAVE (+CCW) to geospatial (+CW)
    stat_packet['WaveDm'] = testbedutils.anglesLib.STWangle2geo(
        stat_packet['WaveDm'])
    # correct angles
    stat_packet['WaveDm'] = testbedutils.anglesLib.angle_correct(
        stat_packet['WaveDm'])
    obse_packet['ncSpec'] = np.ones(
        (obse_packet['spec'].shape[0], obse_packet['spec'].shape[1],
         obse_packet['spec'].shape[2], 72)) * 1e-6
    # interp = np.ones((obse_packet['spec'].shape[0], obse_packet['spec'].shape[1], wavefreqbin.shape[0],
    #                   obse_packet['spec'].shape[3])) * 1e-6  ### TO DO marked for removal
    for station in range(0, np.size(obse_packet['spec'], axis=1)):
        # for tt in range(0, np.size(obse_packet['spec'], axis=0)):  # interp back to 62 frequencies
        #         f = interpolate.interp2d(obse_packet['wavefreqbin'], obse_packet['directions'],
        #                                  obse_packet['spec'][tt, station, :, :].T, kind='linear')
        # interp back to frequency bands that FRF data are kept in
        # interp[tt, station, :, :] = f(wavefreqbin, obse_packet['directions']).T

        # rotate the spectra back to true north
        obse_packet['ncSpec'][:, station, :, :], obse_packet['ncDirs'] = \
            prepdata.grid2geo_spec_rotate(
                obse_packet['directions'],
                obse_packet['spec'][:, station, :, :])  # interp[:, station, :, :] - this was with interp
        # now converting m^2/Hz/radians back to m^2/Hz/degree
        # note that units of degrees are on the denominator which requires a deg2rad conversion instead of rad2deg
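        # e.g. a density of 1 m^2/Hz/radian equals pi/180 (~0.0175) m^2/Hz/degree,
        # so the numerical operation is a multiply by pi/180, i.e. np.deg2rad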
        obse_packet['ncSpec'][:, station, :, :] = np.deg2rad(
            obse_packet['ncSpec'][:, station, :, :])
    obse_packet['modelfreqbin'] = obse_packet['wavefreqbin']
    # making sure output frequency bins now match the frequencies that were interpolated to
    obse_packet['wavefreqbin'] = obse_packet['wavefreqbin']  # wavefreqbin

    ######################################################################################################################
    ######################################################################################################################
    ##################################  Spatial Data HERE     ############################################################
    ######################################################################################################################
    ######################################################################################################################
    gridPack = prepdata.makeCMSgridNodes(float(cio.sim_Packet[0]),
                                         float(cio.sim_Packet[1]),
                                         float(cio.sim_Packet[2]),
                                         dep_pack['dx'], dep_pack['dy'],
                                         dep_pack['bathy'])  # dims [t, x, y]
    # ################################
    #        Make NETCDF files       #
    # ################################
    if np.median(gridPack['elevation']) < 0:
        gridPack['elevation'] = -gridPack['elevation']
    fldrArch = os.path.join(model, version_prefix)
    spatial = {
        'time': nc.date2num(wave_pack['time'],
                            units='seconds since 1970-01-01 00:00:00'),
        'station_name': 'Regional Simulation Field Data',
        'waveHs': np.transpose(wave_pack['waveHs'], (0, 2, 1)),  # dims [t, y, x]
        'waveTm': np.transpose(np.ones_like(wave_pack['waveHs']) * -999, (0, 2, 1)),
        'waveDm': np.transpose(wave_pack['waveDm'], (0, 2, 1)),  # dims [t, y, x]
        'waveTp': np.transpose(wave_pack['waveTp'], (0, 2, 1)),  # dims [t, y, x]
        'radStressX': np.transpose(radStress_packet['radStressX'], (0, 2, 1)),
        'radStressY': np.transpose(radStress_packet['radStressY'], (0, 2, 1)),
        'dissipation': np.transpose(breaking_packet['dissipation'], (0, 2, 1)),
        'bathymetry': np.transpose(gridPack['elevation'], (0, 2, 1)),  # dims [t, y, x]
        'latitude': gridPack['latitude'],    # NOT WRITTEN TO FILE
        'longitude': gridPack['longitude'],  # NOT WRITTEN TO FILE
        'xFRF': gridPack['xFRF'],
        'yFRF': gridPack['yFRF'],
        ######################
        'DX': dep_pack['dx'],
        'DY': dep_pack['dy'],
        'NI': dep_pack['NI'],
        'NJ': dep_pack['NJ'],
        'grid_azimuth': gridPack['azimuth']
    }

    TdsFldrBase = os.path.join(Thredds_Base, fldrArch)
    NCpath = sb.makeNCdir(Thredds_Base,
                          os.path.join(version_prefix, 'Field'),
                          datestring,
                          model=model)
    # make the name of this nc file
    NCname = 'CMTB-waveModels_{}_{}_Field_{}.nc'.format(
        model, version_prefix, datestring)
    fieldOfname = os.path.join(
        NCpath, NCname
    )  # TdsFldrBase + '/CMTB-waveModels_CMS_{}_Local-Field_%s.nc'.format(version_prefix, datestring)

    if not os.path.exists(TdsFldrBase):
        os.makedirs(
            TdsFldrBase)  # make the directory for the thredds data output
    if not os.path.exists(os.path.join(TdsFldrBase, 'Field', 'Field.ncml')):
        inputOutput.makencml(
            os.path.join(TdsFldrBase, 'Field',
                         'Field.ncml'))  # remake the ncml if its not there
    # make file name strings
    flagfname = os.path.join(fpath, 'Flags{}.out.txt'.format(
        datestring))  # startTime # the name of flag file
    fieldYaml = 'yaml_files/waveModels/%s/Field_globalmeta.yml' % fldrArch  # field
    varYaml = 'yaml_files/waveModels/%s/Field_var.yml' % fldrArch
    assert os.path.isfile(
        fieldYaml
    ), 'NetCDF yaml files are not created'  # make sure yaml file is in place
    makenc.makenc_field(data_lib=spatial,
                        globalyaml_fname=fieldYaml,
                        flagfname=flagfname,
                        ofname=fieldOfname,
                        var_yaml_fname=varYaml)
    ###################################################################################################################
    ###############################   Plotting  Below   ###############################################################
    ###################################################################################################################
    dep_pack['bathy'] = np.transpose(dep_pack['bathy'],
                                     (0, 2, 1))  # dims [t, y, x]
    plotParams = [('waveHs', '$m$'), ('bathymetry', 'NAVD88 $[m]$'),
                  ('waveTp', '$s$'), ('waveDm', '$degTn$')]
    if pFlag:
        for param in plotParams:
            print('    plotting %s...' % param[0])
            spatialPlotPack = {
                'title': 'Regional Grid: %s' % param[0],
                'xlabel': 'Longshore distance [m]',
                'ylabel': 'Cross-shore distance [m]',
                'field': spatial[param[0]],
                'xcoord': spatial['xFRF'],
                'ycoord': spatial['yFRF'],
                'cblabel': '%s - %s' % (param[0], param[1]),
                'time': nc.num2date(spatial['time'],
                                    'seconds since 1970-01-01')
            }
            fnameSuffix = 'figures/CMTB_CMS_%s_%s' % (version_prefix, param[0])
            if param[0] == 'waveHs':
                oP.plotSpatialFieldData(dep_pack,
                                        spatialPlotPack,
                                        os.path.join(fpath, fnameSuffix),
                                        nested=0,
                                        directions=spatial['waveDm'])
            else:
                oP.plotSpatialFieldData(dep_pack,
                                        spatialPlotPack,
                                        os.path.join(fpath, fnameSuffix),
                                        nested=0)
            # now make a gif for each one, then delete pictures
            fList = sorted(glob.glob(fpath + '/figures/*%s*.png' % param[0]))
            sb.makegif(
                fList, fpath + '/figures/CMTB_%s_%s_%s.gif' %
                (version_prefix, param[0], datestring))
            for ff in fList:
                os.remove(ff)

    ######################################################################################################################
    ######################################################################################################################
    ##################################  Wave Station Files HERE (loop) ###################################################
    ######################################################################################################################
    ######################################################################################################################

    # this is a list of file names to be made with station data from the parent simulation
    stationList = [
        'waverider-26m', 'waverider-17m', 'awac-11m', '8m-array', 'awac-6m',
        'awac-4.5m', 'adop-3.5m', 'xp200m', 'xp150m', 'xp125m'
    ]
    for gg, station in enumerate(stationList):

        try:
            # generate yaml file name
            stat_yaml_fname = 'yaml_files/waveModels/{}/Station_var.yml'.format(
                fldrArch)
            globalyaml_fname = 'yaml_files/waveModels/{}/Station_globalmeta.yml'.format(
                fldrArch)
            # go get data or locations depending on if we're plotting against data
            if pFlag:
                w = go.getWaveSpec(station)  # go get all data
            else:
                w = go.getWaveGaugeLoc(station)

            stat_data = {
                'time': nc.date2num(stat_packet['time'][:],
                                    units='seconds since 1970-01-01 00:00:00'),
                'waveHs': stat_packet['waveHs'][:, gg],
                # waveTm isn't output by the model; fill values keep the format constant
                'waveTm': np.ones_like(stat_packet['waveHs'][:, gg]) * -999,
                'waveDm': stat_packet['WaveDm'][:, gg],
                'waveTp': stat_packet['Tp'][:, gg],
                'waterLevel': stat_packet['waterLevel'][:, gg],
                'swellHs': stat_packet['swellHs'][:, gg],
                'swellTp': stat_packet['swellTp'][:, gg],
                'swellDm': stat_packet['swellDm'][:, gg],
                'seaHs': stat_packet['seaHs'][:, gg],
                'seaTp': stat_packet['seaTp'][:, gg],
                'seaDm': stat_packet['seaDm'][:, gg],
                'station_name': station,
                'directionalWaveEnergyDensity': obse_packet['ncSpec'][:, gg, :, :],
                'waveDirectionBins': obse_packet['ncDirs'],
                'waveFrequency': obse_packet['wavefreqbin'],
                ###############################
                'DX': dep_pack['dx'],
                'DY': dep_pack['dy'],
                'NI': dep_pack['NI'],
                'NJ': dep_pack['NJ'],
                'grid_azimuth': gridPack['azimuth']
            }
            try:
                stat_data['Latitude'] = w['latitude']
                stat_data['Longitude'] = w['longitude']
            except KeyError:  # this should be rectified
                stat_data['Latitude'] = w['lat']
                stat_data['Longitude'] = w['lon']
            # Name files and make sure server directory has place for files to go
            print('making netCDF for model output at %s ' % station)
            TdsFldrBase = os.path.join(Thredds_Base, fldrArch, station)

            NCpath = sb.makeNCdir(Thredds_Base,
                                  os.path.join(version_prefix, station),
                                  datestring,
                                  model='CMS')
            # make the name of this nc file
            NCname = 'CMTB-waveModels_{}_{}_{}_{}.nc'.format(
                model, version_prefix, station, datestring)
            outFileName = os.path.join(NCpath, NCname)

            if not os.path.exists(TdsFldrBase):
                os.makedirs(
                    TdsFldrBase
                )  # make the directory for the file/ncml to go into
            if not os.path.exists(os.path.join(TdsFldrBase,
                                               station + '.ncml')):
                inputOutput.makencml(
                    os.path.join(TdsFldrBase, station + '.ncml'))
            # make netCDF
            makenc.makenc_Station(stat_data,
                                  globalyaml_fname=globalyaml_fname,
                                  flagfname=flagfname,
                                  ofname=outFileName,
                                  stat_yaml_fname=stat_yaml_fname)

            print "netCDF file's created for station: %s " % station
            ###################################################################################################################
            ###############################   Plotting  Below   ###############################################################
            ###################################################################################################################

            if pFlag and 'time' in w:
                if not full:
                    w['dWED'], w['wavedirbin'] = sbwave.HPchop_spec(
                        w['dWED'], w['wavedirbin'], angadj=70)
                obsStats = sbwave.waveStat(w['dWED'], w['wavefreqbin'],
                                           w['wavedirbin'])

                modStats = sbwave.waveStat(
                    obse_packet['ncSpec'][:, gg, :, :],
                    obse_packet['wavefreqbin'],
                    obse_packet['ncDirs'])  # compute model stats here

                time, obsi, modi = sb.timeMatch(
                    nc.date2num(w['time'], 'seconds since 1970-01-01'),
                    np.arange(w['time'].shape[0]),
                    nc.date2num(stat_packet['time'][:],
                                'seconds since 1970-01-01'),
                    np.arange(len(stat_packet['time'])))  # time match

                for param in modStats:  # loop through each bulk statistic
                    if len(time) > 1 and param in [
                            'Hm0', 'Tm', 'sprdF', 'sprdD', 'Tp', 'Dm'
                    ]:
                        print('    plotting %s: %s' % (station, param))
                        if param in ['Tp', 'Tm10']:
                            units = 's'
                            title = '%s period' % param
                        elif param in ['Hm0']:
                            units = 'm'
                            title = 'Wave Height %s ' % param
                        elif param in ['Dm', 'Dp']:
                            units = 'degrees'
                            title = 'Direction %s' % param
                        elif param in ['sprdF', 'sprdD']:
                            units = '_.'
                            title = 'Spread %s ' % param

                        # now run plots
                        p_dict = {
                            'time': nc.num2date(time, 'seconds since 1970-01-01'),
                            'obs': obsStats[param][obsi.astype(int)],
                            'model': modStats[param][modi.astype(int)],
                            'var_name': param,
                            'units': units,  # this will be put inside a TeX math environment
                            'p_title': title
                        }

                        ofname = os.path.join(
                            fpath, 'figures/Station_%s_%s_%s.png' %
                            (station, param, datestring))
                        stats = obs_V_mod_TS(
                            ofname,
                            p_dict,
                            logo_path='ArchiveFolder/CHL_logo.png')

                        if station == 'waverider-26m' and param == 'Hm0':
                            # this is a fail safe to abort run if the boundary conditions don't
                            # meet quality standards below
                            bias = 0.1  # bias has to be within 10 centimeters
                            RMSE = 0.1  # RMSE has to be within 10 centimeters
                            if isinstance(p_dict['obs'], np.ma.masked_array) \
                                    and not p_dict['obs'].mask.any():
                                p_dict['obs'] = np.array(p_dict['obs'])
                            # try:
                            #     # assert stats['RMSE'] < RMSE, 'RMSE test on spectral boundary energy failed'
                            #     # assert np.abs(stats['bias']) < bias, 'bias test on spectral boundary energy failed'
                            # except:
                            #     print '!!!!!!!!!!FAILED BOUNDARY!!!!!!!!'
                            #     print 'deleting data from thredds!'
                            #     os.remove(fieldOfname)
                            #     os.remove(outFileName)
                            #     raise RuntimeError('The Model Is not validating its offshore boundary condition')
        except IndexError:
            # if an index error is raised (from get data, returns no data), keep processing the rest of the stations
            continue
Example 5
        allYs = np.tile(yy, [np.size(claris['time']), 1, 1])
        # now eliminate xxFRFbackground/yyFRFbackground values that the dune['elevation'] values are masked (True)
        xs = np.ma.concatenate(
            [xs, allXs[~claris['elevation'].mask].flatten()])
        ys = np.ma.concatenate(
            [ys, allYs[~claris['elevation'].mask].flatten()])
        zs = np.ma.concatenate(
            [zs, claris['elevation'][~claris['elevation'].mask].flatten()])
        types.append(' claris')

    return xs, ys, zs, types


dt = DT.timedelta(days=1)

go = getDataFRF.getObs(start, end)
bathyAll = go.getBathyTransectFromNC(forceReturnAll=True)

for tt, date in enumerate(sb.createDateList(
        start, end, dt)):  # make one merged product (hourly)
    print('--- making grid for {}'.format(date))
    ###################
    # now gather data #
    ###################
    # Gather Data for today's interpolations
    go = getDataFRF.getObs(date, date + dt)
    # now gather data
    pierAll = go.getLidarDEM(lidarLoc='pier')
    duneAll = go.getLidarDEM(lidarLoc='dune')
    clarisAll = None  # go.getLidarDEM(lidarLoc='claris')
Example 6
def cBathy_ThresholdedLogic(cBathy, rawspec, waveHsThreshold=1.2):
    """Logic associated with creating the wave height thresholded kalman filtered cBathy representation

    Args:
      cBathy: dictionary from go.getcBathy data
      rawspec: dictionary from go.getwavespec function
      waveHsThreshold: a decimal value for which to compare when generating the new kalman filter (Default value = 1.2)

    Returns:
      the original cBathy dictionary
           'ym': yfrf coords

           'yFRF': yfrf coords

           'epochtime': epoch time

           'xm': xfrf coords

           'xFRF': xfrf coords

           'depthKF': kalman filtered depth estimate (updated with only estimates below wave height threshold

           'depthfC': individual depth estimates

           'P': Process error

           'depthfCError: individual depth estimate error

           'surveyMeanTime': last time data was updated

           'elevation': negative depth KF values

           'time': date time objects for each filtered estimate

    """
    ##### define initial global variables for function
    version_prefix = 'cBKF-T'  # assume only one version
    #### Find which pickle to load
    best = DT.timedelta(3002)  # needs to be within X days to start to be considered
    pickList = glob.glob(
        '/home/number/cmtb/cBathy_Study/pickles/{}_*_TimeAvgcBathy*.pickle'.
        format(version_prefix))
    loadPickleFname = None
    # Sort through pickles containing good cBathy bathymetries
    for file in pickList:
        delta = cBathy['time'][0] - DT.datetime.strptime(
            file.split('/')[-1].split('_')[1], '%Y%m%dT%H%M%SZ')  # days old
        if delta.total_seconds() > 0 and delta.total_seconds(
        ) < best.total_seconds():
            best = delta  # the new delta is currently the best, save it as the best
            # change the current load name to the current best
            loadPickleFname = file  #'cBathy_Study/pickles/%s_%s_%s_TimeAvgcBathy.pickle' %(version_prefix, timerun, file.split('/')[-1].split('_')[2])

    ##### begin Running logic
    # first ensure that the wave data and cbathy have the same time step;
    # if they don't, interpolate the wave data to the cbathy time stamp
    if not np.in1d(rawspec['time'], cBathy['time']).all():
        # interpolate the rawspec to the cbathy time frame
        rawspec['Hs'] = np.interp(cBathy['epochtime'],
                                  xp=rawspec['epochtime'],
                                  fp=rawspec['Hs'])
        rawspec['epochtime'] = cBathy['epochtime']
    try:
        time, idxObs, idxcBathy = sb.timeMatch(
            rawspec['epochtime'][:],
            list(range(rawspec['epochtime'][:].shape[0])), cBathy['epochtime'],
            list(range(len(cBathy['time']))))
        # find idx of waves below this value
        badIdx = np.argwhere(
            rawspec['Hs'][idxObs.astype(int)] > waveHsThreshold).squeeze()
    except TypeError:  # when cBathy is None
        badIdx = np.array([])
    if isinstance(cBathy['depthKF'], np.ma.masked_array):
        cBathy = replacecBathyMasksWithNans(cBathy)

    ##########################################
    # Begin Thresholded kalman filtered logic
    #########################################
    ttO = np.size(cBathy['time']) - np.size(
        badIdx)  # expected output time dimension
    depthKF = np.zeros(
        (ttO, cBathy['depthKF'].shape[1], cBathy['depthKF'].shape[2]))
    depthKFE, P, depthfC, depthfCE = np.zeros_like(depthKF), np.zeros_like(
        depthKF), np.zeros_like(depthKF), np.zeros_like(depthKF)
    timeO, etimeO, rc = np.zeros((ttO), dtype=object), np.zeros((ttO)), 0
    if cBathy is None and loadPickleFname is not None and os.path.isfile(
            loadPickleFname):
        # need to catch the Nones before trying to loop over them
        # load old cBathy
        # print('          CBThresh: No cbathy found at this time (dark?), using old good cBathy .... loading pickle: %s' % loadPickleFname)
        # with open(loadPickleFname, 'rb') as handle:
        #     cBathy = pickle.load(file=handle)
        cBathyOut = None  # don't make output if there's no new cBathy estimate
    else:
        for tt in range(len(cBathy['time'])):
            # this may need to be changed for the not-implemented error above
            # -- may need this for more time steps: np.size(badIdx) < np.size(idxObs)
            # figure out if we have good waves (creating good cbathy); if so, do the new kalman filter logic here
            if tt not in badIdx:  # if there's at least 1 good value,
                # cbathy at time tt is considered good!
                if rc >= 1:
                    cbathyold = {
                        'ym': cBathy['ym'],
                        'epochtime': etimeO[rc - 1],
                        'xm': cBathy['xm'],
                        'depthKF': depthKF[rc - 1],
                        'depthfC': depthfC[rc - 1],
                        'P': P[rc - 1],
                        'depthfCError': depthfCE[rc - 1],
                        # 'k',
                        # 'depth':,
                        # 'fB': ,
                        'time': timeO[rc - 1],
                        'depthKFError': depthKFE[rc - 1]
                    }
                elif loadPickleFname is not None and os.path.isfile(
                        loadPickleFname):
                    with open(loadPickleFname, 'rb') as handle:
                        cbathyold = pickle.load(handle)
                        print(
                            '     CBThresh: wave height good, Kalman filtering from %s'
                            % loadPickleFname)
                    if cbathyold['elevation'].shape != cBathy['depthKF'].shape[
                            1:]:  # load from background
                        print(
                            '  Loading from background, you changed your grid shape'
                        )
                        from getdatatestbed import getDataFRF
                        go = getDataFRF.getObs(cBathy['time'][0],
                                               cBathy['time'][-1])
                        full = go.getBathyGridcBathy()
                        cbathyold = sb.reduceDict(full, -1)
                        xinds = np.where(np.in1d(cbathyold['xm'],
                                                 cBathy['xm']))[0]
                        yinds = np.where(np.in1d(cbathyold['ym'],
                                                 cBathy['ym']))[0]
                        for key in list(cbathyold.keys()):
                            if key == 'xm':
                                cbathyold[key] = cbathyold[key][xinds]
                            elif key == 'ym':
                                cbathyold[key] = cbathyold[key][yinds]
                            elif key not in ['epochtime', 'time', 'xm', 'ym']:
                                cbathyold[key] = cbathyold[key][
                                    slice(yinds[0], yinds[-1] + 1),
                                    slice(xinds[0], xinds[-1] + 1)]
                else:
                    raise ImportError(
                        'You need a cBathy to seed the first kalman filter step '
                    )

                cBathySingle = extract_time(cBathy, tt)
                temp = cbathy_kalman_filter(cBathySingle, cbathyold,
                                            rawspec['Hs'])
                # overwrite old kalman filtered results with new kalman filtered results
                depthKF[rc] = np.ma.filled(
                    temp['depthKF'], fill_value=np.nan)  # temp['depthKF']
                depthKFE[rc] = np.ma.filled(temp['depthKFError'],
                                            fill_value=np.nan)
                P[rc] = np.ma.filled(temp['P'], fill_value=np.nan)
                depthfCE[rc] = np.ma.filled(temp['depthfCError'],
                                            fill_value=np.nan)
                depthfC[rc] = np.ma.filled(temp['depthfC'], fill_value=np.nan)
                timeO[rc] = temp['time']
                etimeO[rc] = temp['epochtime']
                rc += 1

            else:  # cbathy @ time tt is considered bad!
                pass
        if np.size(timeO) > 0:
            # done creating the day's new cBathy output
            # save the last file
            savePickleFname = '/home/number/cmtb/cBathy_Study/pickles/%s_%s_TimeAvgcBathy.pickle' % (
                version_prefix, timeO[-1].strftime('%Y%m%dT%H%M%SZ'))
            print(
                '      CBThresh: Kalman filtered, now saving pickle {}'.format(
                    savePickleFname))
            cBathyOut = {
                'ym': cBathy['ym'],
                'yFRF': cBathy['ym'],
                'epochtime': etimeO,
                'xm': cBathy['xm'],
                'xFRF': cBathy['xm'],
                'depthKF': depthKF,
                'depthfC': depthfC,
                'P': P,
                'depthfCError': depthfCE,
                'surveyMeanTime': etimeO[-1],
                'elevation': -depthKF,
                # 'k',
                # 'depth':,
                # 'fB': ,
                'time': timeO,
                'depthKFError': depthKFE
            }

            with open(savePickleFname, 'wb') as handle:
                # reduce if its more than one (still works on single dictionary)
                cBathyOutPick = sb.reduceDict(cBathyOut, -1)
                pickle.dump(cBathyOutPick,
                            file=handle,
                            protocol=pickle.HIGHEST_PROTOCOL)
        else:
            cBathyOut = None
    return cBathyOut
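
A hedged usage sketch for the thresholded filter above, assuming the getDataFRF accessors used elsewhere in these examples (getBathyGridcBathy for the cBathy dictionary, getWaveSpec for the offshore spectra):

from getdatatestbed import getDataFRF
import datetime as DT
import numpy as np

go = getDataFRF.getObs(DT.datetime(2018, 1, 1), DT.datetime(2018, 1, 2))
cBathy = go.getBathyGridcBathy()         # cBathy dictionary, as in the docstring
rawspec = go.getWaveSpec(gaugenumber=0)  # needs 'Hs', 'time', and 'epochtime'
filtered = cBathy_ThresholdedLogic(cBathy, rawspec, waveHsThreshold=1.2)
if filtered is not None:
    print('kept %d of %d cBathy time steps' %
          (np.size(filtered['time']), np.size(cBathy['time'])))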
Example 7
def cBathy_VarianceLogic(cBathy,
                         variancePacket,
                         rawspec,
                         varianceThreshold=0.13,
                         percentThreshold=0.35):
    """Logic associated with creating the variance thresholded kalman filtered cBathy representation
        removes times that start before 1300 UTC and after 2100 UTC

    Args:
      cBathy: dictionary from go.getcBathy data
      rawspec: dictionary from go.getwavespec function
      waveHsThreshold: a decimal value for which to compare when generating the new kalman filter (Default value = 1.2)

    Returns:
      the original cBathy dictionary
           'ym': yfrf coords

           'yFRF': yfrf coords

           'epochtime': epoch time

           'xm': xfrf coords

           'xFRF': xfrf coords

           'depthKF': kalman filtered depth estimate (updated with only estimates below wave height threshold

           'depthfC': individual depth estimates

           'P': Process error

           'depthfCError: individual depth estimate error

           'surveyMeanTime': last time data was updated

           'elevation': negative depth KF values

           'time': date time objects for each filtered estimate

    """
    startTimeofDay = 13  # 0800 in EST (winter)
    endTimeofDay = 21  # 1600 in EST (winter)
    import getpass
    user = getpass.getuser()
    ##### define initial global variables for function
    version_prefix = 'cBKF-T'  # assume only one version
    #### Find which pickle to load
    best = DT.timedelta(3002)  # needs to be within X days to start to be considered
    pickList = glob.glob(
        '/home/{}/cmtb/cBathy_Study/pickles/{}_*_TimeAvgcBathy*.pickle'.format(
            user, version_prefix))
    loadPickleFname = None
    # Sort through pickles containing good cBathy bathymetries
    for file in pickList:
        delta = cBathy['time'][0] - DT.datetime.strptime(
            file.split('/')[-1].split('_')[1], '%Y%m%dT%H%M%SZ')  # days old
        if delta.total_seconds() > 0 and delta.total_seconds(
        ) < best.total_seconds():
            best = delta  # the new delta is currently the best, save it as the best
            # change the current load name to the current best
            loadPickleFname = file  # 'cBathy_Study/pickles/%s_%s_%s_TimeAvgcBathy.pickle' %(version_prefix, timerun, file.split('/')[-1].split('_')[2])

    ####################  begin Running logic  #########################################
    # first ensure that the wave data and cbathy have the same time step;
    # if they don't, interpolate the wave data to the cbathy time stamp
    # (cBathys produced outside the daylight window are removed below)
    if not np.in1d(rawspec['time'], cBathy['time']).all():
        # interpolate the rawspec to the cbathy time frame
        rawspec['Hs'] = np.interp(cBathy['epochtime'],
                                  xp=rawspec['epochtime'],
                                  fp=rawspec['Hs'])
        rawspec['epochtime'] = cBathy['epochtime']
    if not np.in1d(variancePacket['time'], cBathy['time']).all():
        idxVar = np.argwhere(np.in1d(variancePacket['time'], cBathy['time']))
        variancePacket = sb.reduceDict(
            variancePacket,
            idxVar.squeeze())  # make sure variance matches cBathy
        idxCB = np.argwhere(np.in1d(cBathy['time'], variancePacket['time']))
        cBathy = sb.reduceDict(
            cBathy, idxCB.squeeze())  # make sure cBathy matches variance
    assert (cBathy['time'] == variancePacket['time']).all(), 'time check'

    try:
        badIdx = []
        for i in range(len(variancePacket['time'])):
            if (((variancePacket['bw'][i] > varianceThreshold).sum()/float(variancePacket['bw'][i].size)) > percentThreshold) \
                    or (variancePacket['time'][i].hour <= startTimeofDay) or (variancePacket['time'][i].hour >= endTimeofDay):
                badIdx.append(
                    i)  # find idx of variances waves below this value
        badIdx = np.array(badIdx, dtype=int)
    except TypeError:  # when cBathy is None
        badIdx = np.array([])
    if isinstance(cBathy['depthKF'], np.ma.masked_array):
        cBathy = replacecBathyMasksWithNans(cBathy)

    ##########################################
    # Begin Thresholded kalman filtered logic
    #########################################
    ttO = np.size(cBathy['time']) - np.size(
        badIdx)  # expected output time dimension
    depthKF = np.zeros(
        (ttO, cBathy['depthKF'].shape[1], cBathy['depthKF'].shape[2]))
    depthKFE, P, depthfC, depthfCE = np.zeros_like(depthKF), np.zeros_like(
        depthKF), np.zeros_like(depthKF), np.zeros_like(depthKF)
    timeO, etimeO, rc = np.zeros((ttO), dtype=object), np.zeros((ttO)), 0
    # badIdx -= 1 # reset the identified idx's to those that will be iterated through
    if cBathy is None and loadPickleFname is not None and os.path.isfile(
            loadPickleFname):
        cBathyOut = None  # don't make output if there's no new cBathy estimate
    else:
        for tt in range(len(cBathy['time'])):
            # this may need to be changed for the not-implemented error above
            # -- may need this for more time steps: np.size(badIdx) < np.size(idxObs)
            # figure out if we have good waves (creating good cbathy); if so, do the new kalman filter logic here
            if tt not in badIdx:  # if there's at least 1 good value
                # cbathy at time tt is considered good!
                if rc >= 1:
                    cbathyold = {
                        'ym': cBathy['ym'],
                        'epochtime': etimeO[rc - 1],
                        'xm': cBathy['xm'],
                        'depthKF': depthKF[rc - 1],
                        'depthfC': depthfC[rc - 1],
                        'P': P[rc - 1],
                        'depthfCError': depthfCE[rc - 1],
                        'time': timeO[rc - 1],
                        'depthKFError': depthKFE[rc - 1]
                    }
                elif loadPickleFname is not None and os.path.isfile(
                        loadPickleFname):
                    with open(loadPickleFname, 'rb') as handle:
                        cbathyold = pickle.load(handle)
                        print((
                            '     cBKF-T: good cBathy, Kalman filtering from %s'
                            % loadPickleFname))
                    if cbathyold['depthKF'].shape != cBathy['depthKF'].shape[
                            1:]:  # load from background
                        print(
                            '  Loading from background, you changed your grid shape'
                        )
                        from getdatatestbed import getDataFRF
                        go = getDataFRF.getObs(cBathy['time'][0],
                                               cBathy['time'][-1])
                        full = go.getBathyGridcBathy()
                        cbathyold = sb.reduceDict(full, -1)
                        xinds = np.where(np.in1d(cbathyold['xm'],
                                                 cBathy['xm']))[0]
                        yinds = np.where(np.in1d(cbathyold['ym'],
                                                 cBathy['ym']))[0]
                        for key in list(cbathyold.keys()):
                            if key == 'xm':
                                cbathyold[key] = cbathyold[key][xinds]
                            elif key == 'ym':
                                cbathyold[key] = cbathyold[key][yinds]
                            elif key not in ['epochtime', 'time', 'xm', 'ym']:
                                cbathyold[key] = cbathyold[key][
                                    slice(yinds[0], yinds[-1] + 1),
                                    slice(xinds[0], xinds[-1] + 1)]
                else:
                    raise ImportError(
                        'You need a cBathy to seed the first kalman filter step '
                    )

                cBathySingle = extract_time(cBathy, tt)
                temp = cbathy_kalman_filter(cBathySingle, cbathyold,
                                            rawspec['Hs'])
                # overwrite old kalman filtered results with new kalman filtered results
                depthKF[rc] = np.ma.filled(temp['depthKF'], fill_value=np.nan)
                depthKFE[rc] = np.ma.filled(temp['depthKFError'],
                                            fill_value=np.nan)
                P[rc] = np.ma.filled(temp['P'], fill_value=np.nan)
                depthfCE[rc] = np.ma.filled(temp['depthfCError'],
                                            fill_value=np.nan)
                depthfC[rc] = np.ma.filled(temp['depthfC'], fill_value=np.nan)
                timeO[rc] = temp['time']
                etimeO[rc] = temp['epochtime']
                rc += 1  # add to the record counter
            else:  # cbathy @ time tt is considered bad!
                pass
        if np.size(timeO) > 0:
            # done creating the day's new cBathy output; save the last file
            savePickleFname = '/home/{}/cmtb/cBathy_Study/pickles/{}_{}_TimeAvgcBathy.pickle'.format(
                user, version_prefix, timeO[-1].strftime('%Y%m%dT%H%M%SZ'))
            print(
                ('      cBKF-T: Kalman filtered, now saving pickle {}'.format(
                    savePickleFname)))
            cBathyOut = {
                'ym': cBathy['ym'],
                'yFRF': cBathy['ym'],
                'epochtime': etimeO,
                'xm': cBathy['xm'],
                'xFRF': cBathy['xm'],
                'depthKF': depthKF,
                'depthfC': depthfC,
                'P': P,
                'depthfCError': depthfCE,
                'surveyMeanTime': etimeO[-1],
                'elevation': -depthKF,
                'time': timeO,
                'depthKFError': depthKFE
            }

            with open(savePickleFname, 'wb') as handle:
                # reduce if its more than one (still works on single dictionary)
                cBathyOutPick = sb.reduceDict(cBathyOut, -1)
                pickle.dump(cBathyOutPick,
                            file=handle,
                            protocol=pickle.HIGHEST_PROTOCOL)
        else:
            cBathyOut = None
    return cBathyOut
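
For reference, here is a minimal sketch of the per-grid-cell update that a routine like cbathy_kalman_filter typically performs. The function name kalman_update_depth, the process-noise term Q, and the exact field handling are illustrative assumptions, not taken from the code above; only the standard scalar Kalman gain/update/variance equations are implied.

import numpy as np

def kalman_update_depth(h_prior, P_prior, h_obs, obs_err, Q=0.05):
    """Elementwise scalar Kalman update for gridded depth estimates (a sketch).

    h_prior, P_prior: prior depth and its error variance (arrays)
    h_obs, obs_err:   new cBathy depth and its reported error (std. dev.)
    Q:                assumed process-noise variance added between updates
    """
    P_pred = P_prior + Q                      # inflate prior variance for elapsed time
    R = np.square(obs_err)                    # observation error variance
    K = P_pred / (P_pred + R)                 # Kalman gain in [0, 1]
    h_post = h_prior + K * (h_obs - h_prior)  # pull the prior toward the new estimate
    P_post = (1.0 - K) * P_pred               # shrink the posterior variance
    return h_post, P_post
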
Example n. 8
def plotAltimeterSummary(minTime, maxTime, **kwargs):
    """Function that makes plots for altimeters and beach change.

    Args:
        minTime: start time in epoch
        maxTime: end time in epoch

    Keyword Arguments:
        frontEnd: utilizes the switch between chl and FRF servers, default will use FRF server

    """
    plt.style.use(['seaborn-poster'])
    chlFront = "https://chldata.erdc.dren.mil/thredds/dodsC/frf/"
    frfFront = "http://bones/thredds/dodsC/FRF/"
    frontEnd = kwargs.get('frontEnd', frfFront)
    url1 = "geomorphology/altimeter/Alt940-340-altimeter/Alt940-340-altimeter.ncml"
    url2 = "geomorphology/altimeter/Alt940-250-altimeter/Alt940-250-altimeter.ncml"
    url3 = "geomorphology/altimeter/Alt940-200-altimeter/Alt940-200-altimeter.ncml"
    # 860 line
    url4 = "geomorphology/altimeter/Alt861-350-altimeter/Alt861-350-altimeter.ncml"
    url5 = "geomorphology/altimeter/Alt861-300-altimeter/Alt861-300-altimeter.ncml"
    url6 = "geomorphology/altimeter/Alt861-250-altimeter/Alt861-250-altimeter.ncml"
    url7 = "geomorphology/altimeter/Alt861-200-altimeter/Alt861-200-altimeter.ncml"
    url8 = "geomorphology/altimeter/Alt861-150-altimeter/Alt861-150-altimeter.ncml"
    # 769 line
    url9 = "geomorphology/altimeter/Alt769-350-altimeter/Alt769-350-altimeter.ncml"
    url10 = "geomorphology/altimeter/Alt769-300-altimeter/Alt769-300-altimeter.ncml"
    url11 = "geomorphology/altimeter/Alt769-250-altimeter/Alt769-250-altimeter.ncml"
    url12 = "geomorphology/altimeter/Alt769-200-altimeter/Alt769-200-altimeter.ncml"
    url13 = "geomorphology/altimeter/Alt769-150-altimeter/Alt769-150-altimeter.ncml"
    # original Line
    url14 = "geomorphology/altimeter/Alt03-altimeter/Alt03-altimeter.ncml"
    url15 = "geomorphology/altimeter/Alt04-altimeter/Alt04-altimeter.ncml"
    url16 = "geomorphology/altimeter/Alt05-altimeter/Alt05-altimeter.ncml"
    # get wave record and smush to one record
    go = getDataFRF.getObs(DT.datetime.fromtimestamp(minTime),
                           DT.datetime.fromtimestamp(maxTime))
    w26 = go.getWaveSpec('waverider-26m')
    w17 = go.getWaveSpec('waverider-17m')
    w17New = None  # only plotted below if the 17-m waverider returned data
    if w17 is not None:
        # keep only 17-m records whose times are not already in the 26-m record
        w17New = sb.reduceDict(
            w17,
            np.argwhere(~np.in1d(w17['time'], w26['time'])).squeeze())
    gm = getDataFRF.getDataTestBed(
        DT.datetime.fromtimestamp(minTime - 365 * 24 * 60 * 60),
        DT.datetime.fromtimestamp(maxTime))
    bathy = gm.getBathyIntegratedTransect(forceReturnAll=True,
                                          xbounds=[0, 500],
                                          ybounds=[600, 1000])

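    # RMSE of the elevation change between consecutive integrated-bathymetry
    # surveys, stamped at the time midway between the two surveys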
    diffTime, rmses = [], []
    for tt in range(bathy['time'].shape[0] - 1):
        rmses.append(
            np.sqrt((np.square(bathy['elevation'][tt + 1] -
                               bathy['elevation'][tt])).mean()))
        timeDiffTemp = bathy['time'][tt + 1] - bathy['time'][tt]
        diffTime.append(timeDiffTemp / 2 + bathy['time'][tt])
    ## plot z by time (add artificial offset in elevation for each cross-shore gauge)
    multiplier = 10
    maxTimeDT = nc.num2date(maxTime, 'seconds since 1970-01-01')
    minTimeDT = nc.num2date(minTime, 'seconds since 1970-01-01')
    marker = 2  # marker size for wave plot
    lw = 1
    #######################################################################################
    fig = plt.figure(figsize=(12, 12))
    plt.suptitle(
        'RMSE calculated between\nyBounds = [600, 1000] xBounds = [0, 500]\nto avoid pier hole'
    )
    ax0 = plt.subplot2grid((8, 8), (0, 0), colspan=8, rowspan=1)
    ax0.plot(w26['time'], w26['Hs'], 'm.', label='26m', ms=marker)
    if w17New is not None:
        ax0.plot(w17New['time'], w17New['Hs'], 'r.', label='17m', ms=marker)
    ax0.set_ylabel('wave\nheight [m]', fontsize=12)
    plt.legend()
    plt.gca().axes.get_xaxis().set_visible(False)
    ##############################
    ax00 = plt.subplot2grid((8, 8), (1, 0), colspan=8, rowspan=1, sharex=ax0)
    ax00.plot(diffTime,
              rmses,
              color='black',
              marker="_",
              linestyle='solid',
              ms=150,
              linewidth=lw)
    ax00.set_ylabel('RMSE between\nsurveys [m]', fontsize=12)
    for tt, time in enumerate(bathy['time']):
        ax00.plot([time, time], [0, max(rmses)],
                  'C1',
                  linestyle='dashdot',
                  linewidth=lw)
    plt.gca().axes.get_xaxis().set_visible(False)
    ##############################
    ax1 = plt.subplot2grid((8, 8), (2, 0), colspan=8, rowspan=6, sharex=ax0)
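    # plot each altimeter line: 940-m line and original gauges (blue), 861-m
    # line (red), and 769-m line (cyan); each gauge trace is offset to its FRF
    # cross-shore coordinate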
    for url in [url1, url2, url3, url14, url15, url16]:
        print(frontEnd + url)
        ncfile = nc.Dataset(frontEnd + url)
        try:
            time0 = nc.num2date(ncfile['time'][:], ncfile['time'].units)
            bottom0 = ncfile['bottomElevation'][:]
        except Exception:
            # retry without the last record (time and bottomElevation can
            # disagree in length by one)
            bottom0 = ncfile['bottomElevation'][:-1]
            time0 = nc.num2date(ncfile['time'][:-1], ncfile['time'].units)
        coord = gp.FRFcoord(ncfile['Longitude'][:], ncfile['Latitude'][:])
        ax1.plot(time0,
                 np.tile(coord['xFRF'], len(bottom0)) + bottom0 * multiplier,
                 'b.',
                 ms=marker)
        if url is url16:
            ax1.plot(time0,
                     np.tile(coord['xFRF'], len(bottom0)) +
                     bottom0 * multiplier,
                     'b.',
                     label='940m',
                     ms=marker)
        ax1.plot([minTimeDT, maxTimeDT], [coord['xFRF'], coord['xFRF']],
                 'b',
                 linestyle='dotted',
                 linewidth=lw)
    ##############################
    for url in [url4, url5, url6, url7, url8]:
        print(url)
        ncfile = nc.Dataset(frontEnd + url)
        try:
            time0 = nc.num2date(ncfile['time'][:], ncfile['time'].units)
            bottom0 = ncfile['bottomElevation'][:]
        except Exception:
            bottom0 = ncfile['bottomElevation'][:-1]
            time0 = nc.num2date(ncfile['time'][:-1], ncfile['time'].units)
        coord = gp.FRFcoord(ncfile['Longitude'][:], ncfile['Latitude'][:])
        ax1.plot(time0,
                 np.tile(coord['xFRF'], len(bottom0)) + bottom0 * multiplier,
                 'r.',
                 ms=marker)
        if url is url8:
            ax1.plot(time0,
                     np.tile(coord['xFRF'], len(bottom0)) +
                     bottom0 * multiplier,
                     'r.',
                     label='861m',
                     ms=marker)
        ax1.plot([minTimeDT, maxTimeDT], [coord['xFRF'], coord['xFRF']],
                 'r',
                 linestyle='dotted',
                 linewidth=lw)
    ##############################
    for url in [url9, url10, url11, url12, url13]:
        print(url)
        ncfile = nc.Dataset(frontEnd + url)
        try:
            time0 = nc.num2date(ncfile['time'][:], ncfile['time'].units)
            bottom0 = ncfile['bottomElevation'][:]
        except Exception:
            bottom0 = ncfile['bottomElevation'][:-1]
            time0 = nc.num2date(ncfile['time'][:-1], ncfile['time'].units)
        coord = gp.FRFcoord(ncfile['Longitude'][:], ncfile['Latitude'][:])
        ax1.plot(time0,
                 np.tile(coord['xFRF'], len(bottom0)) + bottom0 * multiplier,
                 'c.',
                 ms=marker)
        if url is url13:
            ax1.plot(time0,
                     np.tile(coord['xFRF'], len(bottom0)) +
                     bottom0 * multiplier,
                     'c.',
                     label='769m',
                     ms=marker)
        ax1.plot([minTimeDT, maxTimeDT], [coord['xFRF'], coord['xFRF']],
                 'c',
                 linestyle='dotted',
                 linewidth=lw)

    ##############################
    ax1.set_xlim([minTimeDT, maxTimeDT])
    ax1.legend()

    plt.setp(plt.xticks()[1], rotation=30, ha='right')
    ax1.set_xlabel('time')
    ax1.set_ylabel(
        'FRF cross-shore location (with change in elevation plotted)',
        fontsize=12)

    fname = '/todaysPlots/TodaysAltimeterSummary_{}.png'.format(
        kwargs.get('duration', ''))
    plt.savefig(fname)
    print('Saved File Here: {}'.format(fname))
    plt.close()
    shutil.copy(fname, '/mnt/gaia/rootdir/CMTB/')
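
A minimal usage sketch; the 45-day window and the duration tag are illustrative choices, not taken from the source:

import time

now = time.time()  # epoch seconds, as the function expects
plotAltimeterSummary(now - 45 * 24 * 3600, now, duration='45days')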