Example #1
def CMSF_velData(cmsfDict, station, dThresh=None):
    """
    this is a little function I wrote that will do the heavy lifting of pulling the current data from a particular gage,
    finds the closest model node to that gage, time matches the data, and returns the variables that need to be
    handed to obsVmod_TS to make pretty plots.
    :param cmsfDict: keys (that are used...):
                    'time' - this needs to be in epochtime
                    'aveE' - average eastward velocity
                    'aveN' - average northward velocity
    :param station: this is the stationname that will get handed to getCurrents, a gagenumber would (should?) also work
    :return: dictionary with keys:
             'time' - epochtimes of the matched data
             'aveEobs' - time-matched observed eastward velocity
             'aveNobs' - time-matched observed northward velocity
             'aveEmod' - time-matched model eastward velocity
             'aveNmod' - time-matched model northward velocity
    """
    # set up a getObs instance spanning the model times (with a small time buffer)
    timeunits = 'seconds since 1970-01-01 00:00:00'
    modTime = nc.num2date(cmsfDict['time'], timeunits)
    go = getObs(modTime[0] - DT.timedelta(minutes=3),
                modTime[-1] + DT.timedelta(minutes=3))

    # get my obs_dict
    obsV = go.getCurrents(station)
    aveUobs = obsV['aveU']
    aveVobs = obsV['aveV']
    obsTime = obsV['time']
    xFRFobs = obsV['xFRF']
    yFRFobs = obsV['yFRF']

    # find the closest node and pull that data
    ind, dist = findNearestUnstructNode(xFRFobs, yFRFobs, cmsfDict)
    if dThresh is not None:
        assert dist <= dThresh, 'Error: this grid has no nodes within %s of gage %s.' % (
            dThresh, station)

    modTime = cmsfDict['time']
    aveUmod = cmsfDict['aveE'][:, ind]  # model velocity time series at the nearest node
    aveVmod = cmsfDict['aveN'][:, ind]

    # run the time matching.
    out = {}
    out['time'], out['aveEobs'], out['aveEmod'] = sb.timeMatch(
        obsTime, aveUobs, modTime, aveUmod)
    _, out['aveNobs'], out['aveNmod'] = sb.timeMatch(
        obsTime, aveVobs, modTime, aveVmod)

    return out
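
A minimal usage sketch (not from the original source): it assumes cmsfDict has already been loaded
from CMS-Flow output with the keys listed in the docstring plus whatever node coordinates
findNearestUnstructNode expects; the station name and threshold below are placeholders.

# hypothetical caller -- cmsfDict comes from wherever the CMS-Flow output was unpacked
matched = CMSF_velData(cmsfDict, 'awac-6m', dThresh=50)
print('matched %d velocity records' % len(matched['time']))
# matched['aveEobs'] / matched['aveEmod'] (and the N components) line up one-to-one for obsVmod_TS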
Example #2
def CMSF_wlData(cmsfDict, station, dThresh=None):
    """
    this is a little function I wrote that will do the heavy lifting of pulling the wl data from a particular gage,
    finds the closest model node to that gage, time matches the data, and returns the variables that need to be
    handed to obsVmod_TS to make pretty plots.
    :param cmsfDict: keys (that are used...):
                    'time' - this needs to be in epochtime
                    'waterLevel' - water level from the CSMF model
    :param station: this is the stationname that will get handed to getGageWL, a gagenumber would (should?) also work
    :return: dictionary with keys:
             'time' - epochtimes of the matched data
             'obsWL' - time-matched observed eastward velocity
             'modWL' - time-matched model northward velocity
    """
    # set up a getObs instance spanning the model times (with a small time buffer)
    timeunits = 'seconds since 1970-01-01 00:00:00'
    modTime = nc.num2date(cmsfDict['time'], timeunits)
    go = getObs(modTime[0] - DT.timedelta(minutes=3),
                modTime[-1] + DT.timedelta(minutes=3))

    # get my obs_dict
    obsWLdict = go.getGaugeWL(station)
    if 'wl' not in obsWLdict:
        out = None
    else:
        obsWL = obsWLdict['wl']
        obsTime = obsWLdict['time']
        xFRFobs = obsWLdict['xFRF']
        yFRFobs = obsWLdict['yFRF']

        # find the closest node and pull that data
        ind, dist = findNearestUnstructNode(xFRFobs, yFRFobs, cmsfDict)
        if dThresh is not None:
            assert dist <= dThresh, 'Error: this grid has no nodes within %s of gage %s.' % (
                dThresh, station)

        modTime = cmsfDict['time']
        modWL = cmsfDict['waterLevel'][:, ind]

        # run the time matching.
        out = {}
        out['time'], out['obsWL'], out['modWL'] = sb.timeMatch(
            obsTime, obsWL, modTime, modWL)

        # make sure the output times are datetimes
        if not isinstance(out['time'][0], DT.datetime):
            out['time'] = [nc.num2date(ii, timeunits) for ii in out['time']]

    return out
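
CMSF_wlData returns None when the gauge query comes back without a 'wl' key, so callers should guard
for that; a short illustrative sketch (station and dThresh values are placeholders):

matchedWL = CMSF_wlData(cmsfDict, station, dThresh=50)
if matchedWL is None:
    print('no water-level observations available for this window')
else:
    print('first matched time: %s' % matchedWL['time'][0])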
Example #3
def getStats(startTime, endTime, model, prefix, station, go, gm):
    """Compute time-matched bulk wave statistics for one station from the observations (go) and the
    model output (gm); returns (time, obsStats, modStats, plotList, obsi, modi), or six Nones when
    either source is empty or no times match.
    """
    # Get observation data
    wo = go.getWaveSpec(station)
    if 'time' not in wo:
        return None, None, None, None, None, None
    if station in go.directional:  # directional spectra
        if prefix != 'FP':  # half plane
            wo['dWED'], wo['wavedirbin'] = sbwave.HPchop_spec(wo['dWED'],
                                                              wo['wavedirbin'],
                                                              angadj=angadj)
        obsStats = sbwave.waveStat(wo['dWED'], wo['wavefreqbin'],
                                   wo['wavedirbin'])
    else:  # non-directional
        obsStats = sbwave.stats1D(wo['fspec'], wo['wavefreqbin'])
    # Get model results
    with warnings.catch_warnings(record=True) as w:
        wm = gm.getWaveSpecModel(prefix, station, model=model)
        if wm is None:
            return [None] * 6
        if w:
            assert str(w[0].message) == (
                "Warning: 'partition' will ignore the 'mask' of the MaskedArray."
            ), w[0].message
    modStats = sbwave.waveStat(wm['dWED'], wm['wavefreqbin'], wm['wavedirbin'])
    # Time match
    time, obsi, modi = sb.timeMatch(wo['epochtime'],
                                    np.arange(wo['time'].shape[0]),
                                    wm['epochtime'],
                                    np.arange(wm['time'].shape[0]))
    if time.size == 0:
        print(
            'No matching time between observation and model results for station '
            + station)
        return [None] * 6
    if station in go.directional:
        plotList = ['Hm0', 'Tm', 'sprdF', 'sprdD', 'Tp', 'Dm']
    else:
        plotList = ['Hm0', 'Tm', 'sprdF', 'Tp']
    return time, obsStats, modStats, plotList, obsi, modi
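
A sketch of how getStats might be driven (the window d1/d2, model, prefix, and station are
illustrative; go and gm are the getObs / getDataTestBed instances the function expects):

go = getDataFRF.getObs(d1, d2)
gm = getDataFRF.getDataTestBed(d1, d2)
time, obsStats, modStats, plotList, obsi, modi = getStats(
    d1, d2, 'CMS', 'HP', 'waverider-26m', go, gm)
if time is None:
    print('no matched observation/model data at waverider-26m')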
Example #4
def CMSanalyze(startTime, inputDict):
    """This runs the post process script for CMS wave
    will create plots and netcdf files at request

    Args:
        inputDict (dict): this is an input dictionary that was generated with the
            keys from the project input yaml file
        startTime (str): input start time with datestring in format YYYY-mm-ddThh:mm:ssZ

    Returns:
        plots in the inputDict['workingDirectory'] location
        netCDF files in the inputDict['netCDFdir'] directory

    """
    # ___________________define Global Variables___________________________________
    if 'pFlag' in inputDict:
        pFlag = inputDict['pFlag']
    else:
        pFlag = True  # will plot true by default
    version_prefix = inputDict['version_prefix']
    path_prefix = inputDict[
        'path_prefix']  # + "/%s/" %version_prefix   # 'data/CMS/%s/' % version_prefix  # for organizing data
    simulationDuration = inputDict['simulationDuration']
    if 'netCDFdir' in inputDict:
        Thredds_Base = inputDict['netCDFdir']
    else:
        whoami = check_output('whoami', shell=True)[:-1]
        Thredds_Base = '/home/%s/thredds_data/' % whoami
    if 'THREDDS' in inputDict:
        server = inputDict['THREDDS']
    else:
        print('Choosing CHL thredds by default, this may be slower!')
        server = 'CHL'

    # _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
    # establishing the resolution of the input datetime
    d1 = DT.datetime.strptime(startTime, '%Y-%m-%dT%H:%M:%SZ')
    d2 = d1 + DT.timedelta(0, simulationDuration * 3600, 0)
    datestring = d1.strftime('%Y-%m-%dT%H%M%SZ')  # a string for file names
    fpath = os.path.join(path_prefix, datestring)
    model = 'CMS'
    # ____________________________________________________________________________
    if version_prefix in ('HP', 'UNTUNED'):
        full = False
    # _____________________________________________________________________________

    print('\nBeginning of Analyze Script\nLooking for file in ' + fpath)
    print('\nData Start: %s  Finish: %s' % (d1, d2))
    print('Analyzing simulation')
    go = getDataFRF.getObs(d1, d2, server)  # setting up get data instance
    prepdata = STPD.PrepDataTools()  # initializing instance for rotation scheme
    cio = cmsIO()  # looks for model output files in the folder to analyze
    ######################################################################################################################
    ######################################################################################################################
    ##################################   Load Data Here / Massage Data Here   ############################################
    ######################################################################################################################
    ######################################################################################################################
    t = DT.datetime.now()
    print('Loading files')
    cio.ReadCMS_ALL(fpath)  # load all files
    stat_packet = cio.stat_packet  # unpack dictionaries from class instance
    obse_packet = cio.obse_Packet
    radStress_packet = cio.radSt_packet
    breaking_packet = cio.break_packet
    dep_pack = cio.dep_Packet
    dep_pack['bathy'] = np.expand_dims(dep_pack['bathy'], axis=0)
    # convert dep_pack to proper dep pack with keys
    wave_pack = cio.wave_Packet
    print('Loaded files in %s' % (DT.datetime.now() - t))
    # correct model output angles from STWAVE (+CCW) to geospatial (+CW)
    stat_packet['WaveDm'] = testbedutils.anglesLib.STWangle2geo(
        stat_packet['WaveDm'])
    # correct angles
    stat_packet['WaveDm'] = testbedutils.anglesLib.angle_correct(
        stat_packet['WaveDm'])
    obse_packet['ncSpec'] = np.ones(
        (obse_packet['spec'].shape[0], obse_packet['spec'].shape[1],
         obse_packet['spec'].shape[2], 72)) * 1e-6
    # interp = np.ones((obse_packet['spec'].shape[0], obse_packet['spec'].shape[1], wavefreqbin.shape[0],
    #                   obse_packet['spec'].shape[3])) * 1e-6  ### TO DO marked for removal
    for station in range(0, np.size(obse_packet['spec'], axis=1)):
        # for tt in range(0, np.size(obse_packet['spec'], axis=0)):  # interp back to 62 frequencies
        #         f = interpolate.interp2d(obse_packet['wavefreqbin'], obse_packet['directions'],
        #                                  obse_packet['spec'][tt, station, :, :].T, kind='linear')
        # interp back to frequency bands that FRF data are kept in
        # interp[tt, station, :, :] = f(wavefreqbin, obse_packet['directions']).T

        # rotate the spectra back to true north
        obse_packet['ncSpec'][:, station, :, :], obse_packet[
            'ncDirs'] = prepdata.grid2geo_spec_rotate(
                obse_packet['directions'], obse_packet['spec'][:,
                                                               station, :, :]
            )  # interp[:, station, :, :]) - this was with interp
        # now converting m^2/Hz/radian back to m^2/Hz/degree
        # degrees are in the denominator, so multiply by pi/180 (np.deg2rad) rather than 180/pi
        obse_packet['ncSpec'][:, station, :, :] = np.deg2rad(
            obse_packet['ncSpec'][:, station, :, :])
    obse_packet['modelfreqbin'] = obse_packet['wavefreqbin']
    # output frequency bins now match the frequencies that the spectra were interpolated to

    ######################################################################################################################
    ######################################################################################################################
    ##################################  Spatial Data HERE     ############################################################
    ######################################################################################################################
    ######################################################################################################################
    gridPack = prepdata.makeCMSgridNodes(float(cio.sim_Packet[0]),
                                         float(cio.sim_Packet[1]),
                                         float(cio.sim_Packet[2]),
                                         dep_pack['dx'], dep_pack['dy'],
                                         dep_pack['bathy'])  # dims [t, x, y]
    # ################################
    #        Make NETCDF files       #
    # ################################
    if np.median(gridPack['elevation']) < 0:
        gridPack['elevation'] = -gridPack['elevation']
    fldrArch = os.path.join(model, version_prefix)
    spatial = {
        'time':
        nc.date2num(wave_pack['time'],
                    units='seconds since 1970-01-01 00:00:00'),
        'station_name':
        'Regional Simulation Field Data',
        'waveHs':
        np.transpose(wave_pack['waveHs'],
                     (0, 2, 1)),  # put into dimensions [t, y, x]
        'waveTm':
        np.transpose(np.ones_like(wave_pack['waveHs']) * -999, (0, 2, 1)),
        'waveDm':
        np.transpose(wave_pack['waveDm'],
                     (0, 2, 1)),  # put into dimensions [t, y, x]
        'waveTp':
        np.transpose(wave_pack['waveTp'],
                     (0, 2, 1)),  # put into dimensions [t, y, x]
        'radStressX':
        np.transpose(radStress_packet['radStressX'], (0, 2, 1)),
        'radStressY':
        np.transpose(radStress_packet['radStressY'], (0, 2, 1)),
        'dissipation':
        np.transpose(breaking_packet['dissipation'], (0, 2, 1)),
        'bathymetry':
        np.transpose(gridPack['elevation'],
                     (0, 2, 1)),  # put into dimensions [t, y, x]
        'latitude':
        gridPack[
            'latitude'],  # put into dimensions [t, y, x] - NOT WRITTEN TO FILE
        'longitude':
        gridPack[
            'longitude'],  # put into dimensions [t, y, x] - NOT WRITTEN TO FILE
        'xFRF':
        gridPack['xFRF'],  # put into dimensions [t, y, x]
        'yFRF':
        gridPack['yFRF'],  # put into dimensions [t, y, x]
        ######################
        'DX':
        dep_pack['dx'],
        'DY':
        dep_pack['dy'],
        'NI':
        dep_pack['NI'],
        'NJ':
        dep_pack['NJ'],
        'grid_azimuth':
        gridPack['azimuth']
    }

    TdsFldrBase = os.path.join(Thredds_Base, fldrArch)
    NCpath = sb.makeNCdir(Thredds_Base,
                          os.path.join(version_prefix, 'Field'),
                          datestring,
                          model=model)
    # make the name of this nc file
    NCname = 'CMTB-waveModels_{}_{}_Field_{}.nc'.format(
        model, version_prefix, datestring)
    fieldOfname = os.path.join(
        NCpath, NCname
    )  # TdsFldrBase + '/CMTB-waveModels_CMS_{}_Local-Field_%s.nc'.format(version_prefix, datestring)

    if not os.path.exists(TdsFldrBase):
        os.makedirs(
            TdsFldrBase)  # make the directory for the thredds data output
    if not os.path.exists(os.path.join(TdsFldrBase, 'Field', 'Field.ncml')):
        inputOutput.makencml(
            os.path.join(TdsFldrBase, 'Field',
                         'Field.ncml'))  # remake the ncml if its not there
    # make file name strings
    flagfname = os.path.join(fpath, 'Flags{}.out.txt'.format(
        datestring))  # startTime # the name of flag file
    fieldYaml = 'yaml_files/waveModels/%s/Field_globalmeta.yml' % (fldrArch
                                                                   )  # field
    varYaml = 'yaml_files/waveModels/%s/Field_var.yml' % (fldrArch)
    assert os.path.isfile(
        fieldYaml
    ), 'NetCDF yaml files are not created'  # make sure yaml file is in place
    makenc.makenc_field(data_lib=spatial,
                        globalyaml_fname=fieldYaml,
                        flagfname=flagfname,
                        ofname=fieldOfname,
                        var_yaml_fname=varYaml)
    ###################################################################################################################
    ###############################   Plotting  Below   ###############################################################
    ###################################################################################################################
    dep_pack['bathy'] = np.transpose(dep_pack['bathy'],
                                     (0, 2, 1))  # dims [t, y, x]
    plotParams = [('waveHs', '$m$'), ('bathymetry', 'NAVD88 $[m]$'),
                  ('waveTp', '$s$'), ('waveDm', '$degTn$')]
    if pFlag:
        for param in plotParams:
            print('    plotting %s...' % param[0])
            spatialPlotPack = {
                'title': 'Regional Grid: %s' % param[0],
                'xlabel': 'Longshore distance [m]',
                'ylabel': 'Cross-shore distance [m]',
                'field': spatial[param[0]],
                'xcoord': spatial['xFRF'],
                'ycoord': spatial['yFRF'],
                'cblabel': '%s - %s' % (param[0], param[1]),
                'time': nc.num2date(spatial['time'],
                                    'seconds since 1970-01-01')
            }
            fnameSuffix = 'figures/CMTB_CMS_%s_%s' % (version_prefix, param[0])
            if param[0] == 'waveHs':
                oP.plotSpatialFieldData(dep_pack,
                                        spatialPlotPack,
                                        os.path.join(fpath, fnameSuffix),
                                        nested=0,
                                        directions=spatial['waveDm'])
            else:
                oP.plotSpatialFieldData(dep_pack,
                                        spatialPlotPack,
                                        os.path.join(fpath, fnameSuffix),
                                        nested=0)
            # now make a gif for each one, then delete pictures
            fList = sorted(glob.glob(fpath + '/figures/*%s*.png' % param[0]))
            sb.makegif(
                fList, fpath + '/figures/CMTB_%s_%s_%s.gif' %
                (version_prefix, param[0], datestring))
            [os.remove(ff) for ff in fList]

    ######################################################################################################################
    ######################################################################################################################
    ##################################  Wave Station Files HERE (loop) ###################################################
    ######################################################################################################################
    ######################################################################################################################

    # this is a list of file names to be made with station data from the parent simulation
    stationList = [
        'waverider-26m', 'waverider-17m', 'awac-11m', '8m-array', 'awac-6m',
        'awac-4.5m', 'adop-3.5m', 'xp200m', 'xp150m', 'xp125m'
    ]
    for gg, station in enumerate(stationList):

        try:
            # generate yaml file name
            stat_yaml_fname = 'yaml_files/waveModels/{}/Station_var.yml'.format(
                fldrArch)
            globalyaml_fname = 'yaml_files/waveModels/{}/Station_globalmeta.yml'.format(
                fldrArch)
            # go get data or locations depending on if we're plotting against data
            if pFlag:
                w = go.getWaveSpec(station)  # go get all data
            else:
                w = go.getWaveGaugeLoc(station)

            stat_data = {
                'time':
                nc.date2num(stat_packet['time'][:],
                            units='seconds since 1970-01-01 00:00:00'),
                'waveHs':
                stat_packet['waveHs'][:, gg],
                'waveTm':
                np.ones_like(stat_packet['waveHs'][:, gg]) *
                -999,  # this isn't output by model, but put in fills to stay constant
                'waveDm':
                stat_packet['WaveDm'][:, gg],
                'waveTp':
                stat_packet['Tp'][:, gg],
                'waterLevel':
                stat_packet['waterLevel'][:, gg],
                'swellHs':
                stat_packet['swellHs'][:, gg],
                'swellTp':
                stat_packet['swellTp'][:, gg],
                'swellDm':
                stat_packet['swellDm'][:, gg],
                'seaHs':
                stat_packet['seaHs'][:, gg],
                'seaTp':
                stat_packet['seaTp'][:, gg],
                'seaDm':
                stat_packet['seaDm'][:, gg],
                'station_name':
                station,
                'directionalWaveEnergyDensity':
                obse_packet['ncSpec'][:, gg, :, :],
                'waveDirectionBins':
                obse_packet['ncDirs'],
                'waveFrequency':
                obse_packet['wavefreqbin'],
                ###############################
                'DX':
                dep_pack['dx'],
                'DY':
                dep_pack['dy'],
                'NI':
                dep_pack['NI'],
                'NJ':
                dep_pack['NJ'],
                'grid_azimuth':
                gridPack['azimuth']
            }
            try:
                stat_data['Latitude'] = w['latitude']
                stat_data['Longitude'] = w['longitude']
            except KeyError:  # this should be rectified
                stat_data['Latitude'] = w['lat']
                stat_data['Longitude'] = w['lon']
            # Name files and make sure server directory has place for files to go
            print('making netCDF for model output at %s' % station)
            TdsFldrBase = os.path.join(Thredds_Base, fldrArch, station)

            NCpath = sb.makeNCdir(Thredds_Base,
                                  os.path.join(version_prefix, station),
                                  datestring,
                                  model='CMS')
            # make the name of this nc file
            NCname = 'CMTB-waveModels_{}_{}_{}_{}.nc'.format(
                model, version_prefix, station, datestring)
            outFileName = os.path.join(NCpath, NCname)

            if not os.path.exists(TdsFldrBase):
                os.makedirs(
                    TdsFldrBase
                )  # make the directory for the file/ncml to go into
            if not os.path.exists(os.path.join(TdsFldrBase,
                                               station + '.ncml')):
                inputOutput.makencml(
                    os.path.join(TdsFldrBase, station + '.ncml'))
            # make netCDF
            makenc.makenc_Station(stat_data,
                                  globalyaml_fname=globalyaml_fname,
                                  flagfname=flagfname,
                                  ofname=outFileName,
                                  stat_yaml_fname=stat_yaml_fname)

            print "netCDF file's created for station: %s " % station
            ###################################################################################################################
            ###############################   Plotting  Below   ###############################################################
            ###################################################################################################################

            if pFlag and 'time' in w:
                if not full:
                    w['dWED'], w['wavedirbin'] = sbwave.HPchop_spec(
                        w['dWED'], w['wavedirbin'], angadj=70)
                obsStats = sbwave.waveStat(w['dWED'], w['wavefreqbin'],
                                           w['wavedirbin'])

                modStats = sbwave.waveStat(
                    obse_packet['ncSpec'][:, gg, :, :],
                    obse_packet['wavefreqbin'],
                    obse_packet['ncDirs'])  # compute model stats here

                time, obsi, modi = sb.timeMatch(
                    nc.date2num(w['time'], 'seconds since 1970-01-01'),
                    np.arange(w['time'].shape[0]),
                    nc.date2num(stat_packet['time'][:],
                                'seconds since 1970-01-01'),
                    np.arange(len(stat_packet['time'])))  # time match

                for param in modStats:  # loop through each bulk statistic
                    if len(time) > 1 and param in [
                            'Hm0', 'Tm', 'sprdF', 'sprdD', 'Tp', 'Dm'
                    ]:
                        print('    plotting %s: %s' % (station, param))
                        if param in ['Tp', 'Tm10']:
                            units = 's'
                            title = '%s period' % param
                        elif param in ['Hm0']:
                            units = 'm'
                            title = 'Wave Height %s ' % param
                        elif param in ['Dm', 'Dp']:
                            units = 'degrees'
                            title = 'Direction %s' % param
                        elif param in ['sprdF', 'sprdD']:
                            units = '_.'
                            title = 'Spread %s ' % param

                        # now run plots
                        p_dict = {
                            'time': nc.num2date(time,
                                                'seconds since 1970-01-01'),
                            'obs': obsStats[param][obsi.astype(int)],
                            'model': modStats[param][modi.astype(int)],
                            'var_name': param,
                            'units':
                            units,  # ) -> this will be put inside a tex math environment!!!!
                            'p_title': title
                        }

                        ofname = os.path.join(
                            fpath, 'figures/Station_%s_%s_%s.png' %
                            (station, param, datestring))
                        stats = obs_V_mod_TS(
                            ofname,
                            p_dict,
                            logo_path='ArchiveFolder/CHL_logo.png')

                        if station == 'waverider-26m' and param == 'Hm0':
                            # this is a fail safe to abort run if the boundary conditions don't
                            # meet quality standards below
                            bias = 0.1  # bias has to be within 10 centimeters
                            RMSE = 0.1  # RMSE has to be within 10 centimeters
                            if isinstance(p_dict['obs'], np.ma.MaskedArray
                                          ) and not p_dict['obs'].mask.any():
                                p_dict['obs'] = np.array(p_dict['obs'])
                            # try:
                            #     # assert stats['RMSE'] < RMSE, 'RMSE test on spectral boundary energy failed'
                            #     # assert np.abs(stats['bias']) < bias, 'bias test on spectral boundary energy failed'
                            # except:
                            #     print '!!!!!!!!!!FAILED BOUNDARY!!!!!!!!'
                            #     print 'deleting data from thredds!'
                            #     os.remove(fieldOfname)
                            #     os.remove(outFileName)
                            #     raise RuntimeError('The Model Is not validating its offshore boundary condition')
        except IndexError:
            # if an index error is raised (from get data, returns no data), keep processing the rest of the stations
            continue
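
A minimal sketch of how CMSanalyze might be invoked; the inputDict keys shown are the ones the
function reads above, but every value (version prefix, paths, duration) is a placeholder:

inputDict = {'version_prefix': 'HP',              # placeholder version
             'path_prefix': 'data/CMS/HP',        # placeholder working directory
             'simulationDuration': 24,            # hours (placeholder)
             'netCDFdir': '/tmp/thredds_data',    # placeholder netCDF output directory
             'pFlag': True}                       # make plots
CMSanalyze('2018-01-01T00:00:00Z', inputDict)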
Example #5
plt.plot(nc.num2date(CBncfile['time'][:], 'seconds since 1970-01-01'),
         CBncfile['waveHs'][:, yshoreSpot, xshoreSpot],
         '.',
         label='CB')
plt.plot(nc.num2date(HPncStatic['time'][:], 'seconds since 1970-01-01'),
         HPncStatic['waveHs'][:, yshoreSpot, xshoreSpot],
         '.',
         label='Static')
plt.plot(nc.num2date(CBTncfile['time'][:], 'seconds since 1970-01-01'),
         CBTncfile['waveHs'][:, yshoreSpot, xshoreSpot],
         '.',
         label='Thresh')
plt.legend()

t, _, _ = sb.timeMatch(HPnc['time'][:], HPnc['waveHs'][:, yshoreSpot,
                                                       xshoreSpot],
                       CBncfile['time'][:], CBncfile['waveHs'][:, yshoreSpot,
                                                               xshoreSpot])
t, _, _ = sb.timeMatch(t, range(len(t)), HPncStatic['time'][:],
                       range(len(HPncStatic['time'][:])))
t, _, CBThs = sb.timeMatch(t, range(len(t)), CBTncfile['time'][:],
                           CBTncfile['waveHs'][:, yshoreSpot, xshoreSpot])
_, _, HPShs = sb.timeMatch(t, range(len(t)), HPncStatic['time'][:],
                           HPncStatic['waveHs'][:, yshoreSpot, xshoreSpot])
_, _, HPhs = sb.timeMatch(t, range(len(t)), HPnc['time'][:],
                          HPnc['waveHs'][:, yshoreSpot, xshoreSpot])
_, _, CBhs = sb.timeMatch(t, range(len(t)), CBncfile['time'][:],
                          CBncfile['waveHs'][:, yshoreSpot, xshoreSpot])

# look at the 11 m awac Hs for each
t, hphs, cbhs = sb.timeMatch(HPnc['time'][:], HPnc['waveHs'][:, yshoreSpot,
                                                             xshoreSpot],
                             CBncfile['time'][:],
                             CBncfile['waveHs'][:, yshoreSpot, xshoreSpot])
Example #6
def makeTS(startTime, endTime, prefix, workDir):
    """Generate observation-vs-model time-series plots (waves, currents, bottom elevation, and lidar
    runup) from CSHORE output for the given time window and write the figures to workDir."""
    gm = getDataFRF.getDataTestBed(startTime, endTime)
    mod = gm.getCSHOREOutput(prefix)
    times = mod['time']
    model_time = times[-1]
    altStations_ = [
        oP.alt_PlotData(station, model_time, times) for station in altStations
    ]
    curStations_ = [
        oP.wave_PlotData(station, model_time, times) for station in curStations
    ]
    waveOnlyStations_ = [
        oP.wave_PlotData(station, model_time, times)
        for station in waveOnlyStations
    ]
    lidar = oP.lidar_PlotData(times)

    datestring = (startTime.strftime('%Y-%m-%dT%H%M%SZ') + '_' +
                  endTime.strftime('%Y-%m-%dT%H%M%SZ'))

    for station in curStations_ + waveOnlyStations_:
        if not station['TS_toggle']:
            continue
        for varName in ['Hs', 'V']:
            if varName == 'Hs':
                varTime = 'wave_time'
                plotName = '$H_{s}$'
                units = 'm'
                varNameM = 'Hs'
            elif varName == 'V' and station['name'] in curStations:
                varTime = 'cur_time'
                plotName = '$V$'
                units = 'm/s'
                varNameM = 'aveN'
            elif varName == 'V' and station['name'] not in curStations:
                continue
            plotTitle = '%s CSHORE %s - %s' % (prefix, startTime,
                                               station['name'])
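            # pull the model time series at the cross-shore node nearest the station's xFRF location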
            mod_Hs = mod[varNameM][:,
                                   np.where(
                                       abs(mod['xFRF'] - station['xFRF']) ==
                                       min(abs(mod['xFRF'] -
                                               station['xFRF'])), 1, 0) ==
                                   1].squeeze()
            comp_time_n, obs_Hs_n, mod_Hs_n = timeMatch(
                station[varTime], station[varName], times, mod_Hs)
            p_dict = {
                'time': comp_time_n,
                'obs': obs_Hs_n,
                'model': mod_Hs_n,
                'var_name': plotName,
                'units': units,
                'p_title': plotTitle
            }

            print('Plotting {} {} {} timeseries'.format(
                datestring, station['name'], varName))
            oP.obs_V_mod_TS(
                os.path.join(
                    workDir, '{}_{}_{}.png'.format(datestring, station['name'],
                                                   varName)), p_dict,
                logo_path)

    for station in altStations_:
        if not station['TS_toggle']:
            continue
        obs_zb = station['zb']
        obs_time = station['time']
        obs_loc = round(station['xFRF'])
        mod_zb = mod['zb'][:,
                           np.where(
                               abs(mod['xFRF'] - station['xFRF']) ==
                               min(abs(mod['xFRF'] -
                                       station['xFRF'])), 1, 0) ==
                           1].squeeze()
        comp_time = times
        prepData = prepDataLib.PrepDataTools()
        matchObs = prepData.prep_obs2mod(obs_time, obs_zb, comp_time)

        if np.sum(matchObs['mask']) > 0:
            mod_zb = mod_zb[np.where(~matchObs['mask'])]

        p_dict = {
            'time': matchObs['time'],
            'obs': matchObs['meanObs'],
            'model': mod_zb,
            'var_name': 'Bottom Elevation',
            'units': 'm',
            'p_title': '%s CSHORE %s - %s' % (prefix, startTime, 'Alt-04')
        }

        if np.size(p_dict['obs']) >= 2:
            print('Plotting {} {} {} timeseries'.format(
                datestring, station['name'], 'BE'))
            oP.obs_V_mod_TS(
                os.path.join(
                    workDir, '{}_{}_{}.png'.format(datestring, station['name'],
                                                   '_BE')), p_dict, logo_path)

    if not lidar['TS_toggle']:
        return
    for varName in ['runupMean', 'runup2perc']:
        obs_runup = lidar[varName]
        obs_time = lidar['runupTime']
        mod_runup = mod[varName]
        comp_time = times
        comp_time_n, obs_runup_n, mod_runup_n = timeMatch(
            obs_time, obs_runup, comp_time, mod_runup)
        if len(comp_time_n) <= 1:
            pass
        else:
            if varName == 'runupMean':
                plotName = 'Mean Run-up'
            elif varName == 'runup2perc':
                plotName = r'$2\%$ Exceedance Run-up'
            p_dict = {
                'time': comp_time_n,
                'obs': obs_runup_n,
                'model': mod_runup_n,
                'var_name': plotName,
                'units': 'm',
                'p_title': '%s CSHORE %s - %s' % (prefix, startTime, 'LiDAR')
            }
            print('Plotting {} lidar {} timeseries'.format(
                datestring, varName))
            oP.obs_V_mod_TS(
                os.path.join(workDir,
                             '{}_lidar_{}.png'.format(datestring, varName)),
                p_dict, logo_path)
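
A sketch of calling makeTS; the window, prefix, and working directory are placeholders, and
startTime/endTime must be datetime objects because the function formats them with strftime:

import datetime as DT
start = DT.datetime(2018, 1, 1)
end = DT.datetime(2018, 1, 2)
makeTS(start, end, 'MOBILE', '/tmp/cshore_plots')  # 'MOBILE' is an illustrative CSHORE prefix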