# Standard-library imports used throughout this module. Spectrum_LR,
# plotting_functions, params and cacheValue are assumed to be defined
# elsewhere in the package.
import logging
import os
import pickle


def filterLR_window(infile, outfiles, outfile_root, expt_id):
    logging.info("Inside filterLR_window. Computing the HT of dz within a "
                 "small window in time, each window with a separate fw_id.")
    dz_id = pickle.load(open(infile))
    expt_id = int(expt_id)

    # get num_frames
    num_frames = Spectrum_LR.get_num_frames(expt_id)
    print num_frames

    # remove any stale window outputs before regenerating them
    # (guarding with os.path.exists avoids an OSError on a fresh run)
    for oo in outfiles:
        if os.path.exists(oo):
            os.unlink(oo)

    # slide a window over the frames, starting a new window every 100 frames
    for n, startF in enumerate(range(1, num_frames, 100)):
        fw_id = Spectrum_LR.task_DzHilbertTransform(
            dz_id,
            cache=cacheValue,
            rowS=params[expt_id]['rowS'],
            rowE=params[expt_id]['rowE'],
            colS=params[expt_id]['colS'],
            colE=params[expt_id]['colE'],
            startF=startF,  # frame at which the window starts
            period=2,       # length of the window in time
        )
        if fw_id is None:
            break
        print "fw_id", fw_id

        for plotcol in [100, 1000]:
            outfile = "%s.%d.%d.fw_id" % (outfile_root, n, plotcol)
            pickle.dump(fw_id, open(outfile, 'w'))
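# A minimal, hand-driven sketch of calling filterLR_window. All paths and the
# experiment id below are hypothetical placeholders; the only facts relied on
# are visible above: infile holds a pickled dz_id, and every window n writes
# its fw_id to "<outfile_root>.<n>.<plotcol>.fw_id" for plotcol 100 and 1000.
def _example_run_filterLR_window():
    import glob
    outfile_root = 'output/window'                   # hypothetical root
    stale = glob.glob(outfile_root + '.*.*.fw_id')   # previous outputs, if any
    filterLR_window('output/dz_id.pickle',           # hypothetical dz_id pickle
                    stale, outfile_root, expt_id='764')
    # afterwards output/window.<n>.100.fw_id and output/window.<n>.1000.fw_id
    # exist for every window n that produced a fw_id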
def plotWavesVerticalTimeSeries(infile, outfile):
    fw_id = pickle.load(open(infile))
    Spectrum_LR.filtered_waves_VTS(
        fw_id,
        600,   # column number
        .04,   # maxmin
        plotName=outfile,
    )
def energyFluxPlots(infiles, outfiles):
    logging.info('Inside EnergyFluxPlots. Making the energy flux plots for '
                 'every fw_id created')
    logging.info(infiles)
    print infiles

    raw_EF, left_EF, right_EF, t, x, z = pickle.load(open(infiles[0]))
    raw_VAEF, left_VAEF, right_VAEF, t = pickle.load(open(infiles[1]))
    #print raw_EF.shape, t.shape, raw_VAEF.shape

    Spectrum_LR.plot_energy_flux_VTS(raw_EF, left_EF, right_EF, t, z, 0.05,
                                     outfiles[0])
    plotting_functions.sharexy_overlay3plots(raw_VAEF, left_VAEF, right_VAEF, t,
                                             0.7, 'Raw', 'Left', 'Right',
                                             'Time', 'Energy flux', outfiles[1])
def plotWavesVTSWindow(infile, outfile):
    print infile
    fw_id = pickle.load(open(infile))
    print "&", outfile
    print len(outfile)
    print "####", fw_id
    Spectrum_LR.filtered_waves_VTS(
        fw_id,
        600,   # column number
        .04,   # maxmin
        plotName=outfile,
    )
def filterLR(infile, outfile):
    dz_id = pickle.load(open(infile))
    fw_id = Spectrum_LR.task_DzHilbertTransform(dz_id, cacheValue,
                                                rowS=320, rowE=860,
                                                colS=60, colE=1260)
    print "fw_id", fw_id
    pickle.dump(fw_id, open(outfile, 'w'))
def computeEnergyFlux(infile, outfiles, plotcol):
    plotcol = int(plotcol)
    logging.info("Computing the energy flux values for every fw_id and "
                 "storing them on disk as 2 separate files.")
    print infile
    fw_id = pickle.load(open(infile))
    print "& outfile:", outfiles
    print "FW_ID #", fw_id

    raw_EF, left_EF, right_EF, t, z, x, raw_VAEF, left_VAEF, right_VAEF = \
        Spectrum_LR.compute_vertically_averaged_energy_flux(fw_id,
                                                            plotcol=plotcol)

    # full energy-flux fields in the first file,
    # vertically averaged time series in the second
    pickle.dump((raw_EF, left_EF, right_EF, t, x, z), open(outfiles[0], 'w'))
    pickle.dump((raw_VAEF, left_VAEF, right_VAEF, t), open(outfiles[1], 'w'))
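# Illustrative only: the two pickles written by computeEnergyFlux are what
# energyFluxPlots (above) and mergeEnergyFlux (below) read back. The default
# file names here are hypothetical placeholders; the tuple layouts are the
# ones dumped above.
def _example_reload_energy_flux(ef_file='output/part.0.EF.pickle',
                                vaef_file='output/part.0.VAEF.pickle'):
    # full fields: (raw_EF, left_EF, right_EF, t, x, z)
    raw_EF, left_EF, right_EF, t, x, z = pickle.load(open(ef_file))
    # vertically averaged time series: (raw_VAEF, left_VAEF, right_VAEF, t)
    raw_VAEF, left_VAEF, right_VAEF, t = pickle.load(open(vaef_file))
    return (raw_EF, left_EF, right_EF, t, x, z), \
           (raw_VAEF, left_VAEF, right_VAEF, t)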
def mergeEnergyFlux(infiles, outfile):
    logging.info("Merging all the VAEF files in the workflow so the average "
                 "energy flux values of every fw_id can be plotted together")

    # sort by part number, increasing numerically
    infiles = sorted(infiles,
                     key=lambda s: int(os.path.basename(s).split('.')[1]))
    logging.info(infiles)
    logging.info(outfile)

    f = open(outfile, 'w')
    f.write('t, raw, left, right\n')
    for infile in infiles:
        raw_VAEF, left_VAEF, right_VAEF, t = pickle.load(open(infile))
        print len(raw_VAEF), len(left_VAEF), len(right_VAEF)
        a, b, c, d = Spectrum_LR.compute_mergeEF(raw_VAEF, left_VAEF,
                                                 right_VAEF, t)
        f.write("%f, %f, %f, %f\n" % (d, a, b, c))
    f.close()
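# A small sketch of reading the merged CSV back for a quick look. numpy and
# matplotlib are assumed to be available; csvfile is whatever was passed to
# mergeEnergyFlux as outfile. Column order follows the header written above:
# t, raw, left, right.
def _example_plot_merged_energy_flux(csvfile):
    import numpy as np
    import matplotlib.pyplot as plt
    data = np.genfromtxt(csvfile, delimiter=',', skip_header=1)
    t, raw, left, right = data[:, 0], data[:, 1], data[:, 2], data[:, 3]
    plt.plot(t, raw, label='Raw')
    plt.plot(t, left, label='Left')
    plt.plot(t, right, label='Right')
    plt.xlabel('Time')
    plt.ylabel('Vertically averaged energy flux')
    plt.legend()
    plt.savefig(csvfile + '.png')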
def plotFilteredLR(infile, outfile):
    fw_id = pickle.load(open(infile))
    Spectrum_LR.plot_data(fw_id, plotName=outfile)
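# End-to-end sketch of the single-fw_id path, chaining the tasks above by
# hand. Every file name is a hypothetical placeholder; the real workflow
# presumably wires these steps through its own task manager, so this only
# records the data flow: dz_id pickle -> fw_id pickle -> plots and
# energy-flux pickles.
def _example_single_fw_id_pipeline():
    filterLR('output/dz_id.pickle', 'output/fw_id.pickle')
    plotFilteredLR('output/fw_id.pickle', 'output/filteredLR.pdf')
    computeEnergyFlux('output/fw_id.pickle',
                      ['output/part.0.EF.pickle',
                       'output/part.0.VAEF.pickle'],
                      plotcol=1000)
    energyFluxPlots(['output/part.0.EF.pickle', 'output/part.0.VAEF.pickle'],
                    ['output/EF_VTS.pdf', 'output/VAEF_overlay.pdf'])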