Example #1
class hdfDatasetExists(Exception):
    """A dataset by this name already exists in this group.
    Due to design limitations of the HDF5 format, deleting/overwriting datasets is not recommended.
    Please either rename the dataset, or delete the entire hdf file and try again.
    """
    pass

class hdfGroupExists(Exception):
    """A group by this name already exists.\n 
        Due to design limitations of the HDF5 format, deleting/overwriting groups is not recommended. \n
        Please either rename the group, or delete the entire hdf file and try again.\n
    """
    pass


if __name__ == "__main__":
    
    exp = 'LAPD_Jan2019'
    probe = 'LAPD10'
    run = 29
    
    src = hdftools.hdfPath( '/Volumes/PVH_DATA/' + exp + '/RAW/run' + str(run) + '_' + probe + '_raw.hdf5')
    
    print(src.file)
    with h5py.File(src.file, 'r') as f:
        srcgrp = f[src.group]
        attrs = readAttrs(srcgrp)
        print(len(attrs))
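
The exceptions above are meant to be raised before an existing dataset or group would be clobbered. A minimal sketch of that guard pattern, using the hdfDatasetExists class defined above (the safe_create helper and file path are hypothetical, not part of uclahedp):

import h5py
import numpy as np

def safe_create(grp, name, data):
    #Refuse to overwrite: HDF5 does not reclaim space from deleted datasets
    if name in grp:
        raise hdfDatasetExists(name)
    grp.create_dataset(name, data=data)

with h5py.File('/tmp/demo.hdf5', 'w') as f:
    safe_create(f, 'data', np.arange(10))
    try:
        safe_create(f, 'data', np.arange(10))
    except hdfDatasetExists as err:
        print('Refused to overwrite: ' + str(err))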
Example #2

if __name__ == '__main__':

    #Windows
    #full = hdftools.hdfPath( os.path.join("F:", os.sep, "2019BIERMANN","FULL", "run29_LAPD_C6_full.hdf5") )
    #thinned = hdftools.hdfPath(os.path.join("F:", os.sep, "2019BIERMANN","FULL", "run29_LAPD_C6_avg.hdf5") )
    #trimmed = hdftools.hdfPath(os.path.join("F:", "LAPD_Mar2018", "FULL", "run61_LAPD1_full_trim.hdf5") )
    #avged = hdftools.hdfPath(os.path.join("F:", "LAPD_Mar2018", "FULL", "run61_LAPD1_full_avg.hdf5") )

    #OSX
    #full = hdftools.hdfPath('/Volumes/PVH_DATA/2019BIERMANN/FULL/run34_LAPD_C6_full.hdf5')
    #thinned = hdftools.hdfPath('/Volumes/PVH_DATA/2019BIERMANN/FULL/run34_LAPD_C6_avg60.hdf5')
    #trimmed = hdftools.hdfPath('/Volumes/PVH_DATA/2019BIERMANN/FULL/run29_LAPD_C6_trim.hdf5')
    #avged = hdftools.hdfPath('/Volumes/PVH_DATA/2019BIERMANN/FULL/run29_LAPD_C6_dimavged.hdf5')
    testfile = hdftools.hdfPath('/Volumes/PVH_DATA/test.hdf5')

    #x = trimDim(full, trimmed, 'time', val_bounds=[0,1e-6],verbose=True)
    #x = trimDim(full, trimmed, 'time', ind_bounds=[120, 500],verbose=True)

    #x = thinBin(full, thinned, 'shots', bin = 30, verbose=True)
    #x = thinPick(full, thinned, 'shots', step = 10, verbose=True)
    #x = avgDim(full, avged, 'shots', verbose=True)

    src = hdftools.hdfPath(
        os.path.join("/Volumes", "PVH_DATA", "LAPD_Jul2019", "FULL",
                     "run34_LAPD_C2_full.hdf5"))
    trimmed = hdftools.hdfPath(
        os.path.join("/Volumes", "PVH_DATA", "LAPD_Jul2019", "FULL",
                     "trimmed.hdf5"))
    avgreps = hdftools.hdfPath(
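
The commented-out calls above show the dataset-tools API: trimDim cuts a labeled dimension by index or value bounds, thinBin/thinPick downsample it, and avgDim averages it away. The val_bounds behavior reduces to a boolean mask along one axis; a standalone numpy sketch, not the uclahedp implementation:

import numpy as np

time = np.linspace(0, 2e-6, 1000)   #toy time axis, in seconds
data = np.random.rand(1000, 5)      #(time, chan)

#Keep only samples with 0 <= t <= 1e-6, analogous to val_bounds=[0, 1e-6]
mask = (time >= 0) & (time <= 1e-6)
trimmed_time = time[mask]
trimmed_data = data[mask, :]
print(trimmed_data.shape)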
Example #3
import os

from uclahedp.tools import hdf as hdftools
from uclahedp.tools import dataset

from uclahedp.process import bdot

data_dir = os.path.join("/Volumes", "PVH_DATA", "LAPD_Jul2019")

#run = 34
run = 7

#probe = 'LAPD3'
#probe = 'LAPD_C2'
probe = 'tdiode'

src = hdftools.hdfPath(
    os.path.join(data_dir, "FULL",
                 "run" + str(run) + "_" + probe + "_full.hdf5"))
trimmed = hdftools.hdfPath(os.path.join(data_dir, "FULL", "trimmed.hdf5"))
avgreps = hdftools.hdfPath(os.path.join(data_dir, "FULL", "avgreps.hdf5"))
thinned = hdftools.hdfPath(
    os.path.join(data_dir, "FULL",
                 "run" + str(run) + "_" + probe + "_full_thinned.hdf5"))
current = hdftools.hdfPath(
    os.path.join(data_dir, "FULL",
                 "run" + str(run) + "_" + probe + "_full_current.hdf5"))

#This step seems to be working
print("Trimming time dimension")
dataset.trimDim(src, trimmed, 'time', val_bounds=[0, 50e-6], verbose=True)
print("Averaging over reps dimension")
#dataset.avgDim(trimmed, avgreps, 'reps', verbose=True, delsrc=False)
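
avgDim (commented out above) collapses one labeled dimension by averaging over it. In numpy terms that is a mean along the matching axis; a sketch under that assumption:

import numpy as np

data = np.random.rand(500, 10, 2)   #(time, reps, chan)
dims = ['time', 'reps', 'chan']

axis = dims.index('reps')           #dimension to average away
avgd = data.mean(axis=axis)         #shape becomes (500, 2)
dims.pop(axis)
print(avgd.shape, dims)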
Example #4
            goodshots_arr.append(i)

    print("Found " + str(len(badshots_arr)) + ' bad shots')

    if len(badshots_arr) / nshots > fatal_badshot_percentage:
        raise ValueError("Lots of bad shots found! Bad sign! Aborting.")

    return badshots_arr, goodshots_arr


if __name__ == "__main__":
    #src = hdftools.hdfPath( os.path.join("F:", "LAPD_Mar2018", "RAW", "test_tdiode_raw.hdf5"))
    #dest = hdftools.hdfPath( os.path.join("F:", "LAPD_Mar2018", "RAW", "test_tdiode_full.hdf5"))

    probe = 'tdiode'
    run = 6

    src = hdftools.hdfPath('/Volumes/PVH_DATA/2019BIERMANN/RAW/' + 'run' +
                           str(run) + '_' + probe + '_raw.hdf5')
    dest = hdftools.hdfPath('/Volumes/PVH_DATA/2019BIERMANN/FULL/' + 'run' +
                            str(run) + '_' + probe + '_full.hdf5')

    #Delete the output file if it already exists
    try:
        os.remove(dest.file)
    except FileNotFoundError:
        pass

    #print(calcT0(src)[0:20] )
    #print(findBadShots(src) )
    tdiodeRawToFull(src, dest, verbose=True)
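
The tail of findBadShots is visible at the top of this example: it partitions shot indices into bad and good lists and aborts if too many are bad. A hypothetical reconstruction of such a ratio test, reusing the badshotratio and fatal_badshot_percentage names from the source (the actual criterion may differ):

import numpy as np

def find_bad_shots(peaks, badshotratio=10, fatal_badshot_percentage=0.2):
    #Flag shots whose diode peak is more than badshotratio below the median
    median = np.median(peaks)
    badshots_arr = [i for i, p in enumerate(peaks) if p < median / badshotratio]
    goodshots_arr = [i for i, p in enumerate(peaks) if p >= median / badshotratio]

    print("Found " + str(len(badshots_arr)) + ' bad shots')
    if len(badshots_arr) / len(peaks) > fatal_badshot_percentage:
        raise ValueError("Lots of bad shots found! Bad sign! Aborting.")
    return badshots_arr, goodshots_arr

peaks = np.abs(np.random.randn(100)) + 1.0
peaks[[3, 17]] = 0.01               #two dead shots
print(find_bad_shots(peaks))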
Example #5
        grp['frames'].attrs['unit'] = ''

        grp.require_dataset('xpixels', (nxpx, ), np.float32,
                            chunks=True)[:] = np.arange(nxpx)
        grp['xpixels'].attrs['unit'] = ''

        grp.require_dataset('ypixels', (nypx, ), np.float32,
                            chunks=True)[:] = np.arange(nypx)
        grp['ypixels'].attrs['unit'] = ''

        grp.require_dataset('chan', (nchan, ), np.float32,
                            chunks=True)[:] = np.arange(nchan)
        grp['chan'].attrs['unit'] = ''

    return dest


if __name__ == "__main__":

    #data_dir = os.path.join("F:","LAPD_Jul2019")
    data_dir = os.path.join("/Volumes", "PVH_DATA", "LAPD_Sept2019")

    img_dir = os.path.join(data_dir, 'PIMAX')
    dest = hdftools.hdfPath(
        os.path.join(data_dir, "RAW", "run22.01_pimax4.hdf5"))
    csv_dir = os.path.join(data_dir, "METADATA")

    run = 22.01
    probe = 'pimax4'

    imgDirToRaw(run, probe, img_dir, dest, csv_dir, verbose=False)
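
The axis-writing pattern above (require_dataset, fill, tag with a unit attribute) repeats for every axis. A minimal standalone sketch of the same pattern (write_axis and the file path are illustrative, not uclahedp API):

import h5py
import numpy as np

def write_axis(grp, name, values, unit=''):
    #Create the axis dataset if absent, then overwrite its contents
    ds = grp.require_dataset(name, (len(values), ), np.float32, chunks=True)
    ds[:] = values
    ds.attrs['unit'] = unit

with h5py.File('/tmp/axes_demo.hdf5', 'w') as f:
    write_axis(f, 'xpixels', np.arange(512))
    write_axis(f, 'time', np.linspace(0, 1e-3, 100), unit='s')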
Example #6
                                        np.float32,
                                        chunks=True)[:] = np.arange(nypx)
                destgrp['ypixels'].attrs['unit'] = ''
                dimlabels.append('ypixels')

            destgrp.require_dataset('reps', (nreps, ), np.float32,
                                    chunks=True)[:] = np.arange(nreps)
            destgrp['reps'].attrs['unit'] = ''
            dimlabels.append('reps')

            destgrp.require_dataset('chan', (nchan, ), np.float32,
                                    chunks=True)[:] = np.arange(nchan)
            destgrp['chan'].attrs['unit'] = ''
            dimlabels.append('chan')

            destgrp['data'].attrs['dimensions'] = [
                s.encode('utf-8') for s in dimlabels
            ]


if __name__ == "__main__":

    #data_dir = os.path.join("F:","LAPD_Jul2019")
    data_dir = os.path.join("/Volumes", "PVH_DATA", "LAPD_Sept2019")

    src = hdftools.hdfPath(
        os.path.join(data_dir, 'RAW', 'run15.01_pimax4.hdf5'))
    dest = hdftools.hdfPath(
        os.path.join(data_dir, "FULL", "run15.01_pimax4_full.hdf5"))

    imgSeqRawToFull(src, dest)
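
Because the 'dimensions' attribute is written as UTF-8 bytes, readers must decode it on the way back out. A self-contained sketch of the round trip (file path illustrative):

import h5py
import numpy as np

with h5py.File('/tmp/dims_demo.hdf5', 'w') as f:
    f['data'] = np.zeros((10, 2))
    #Store labels as bytes, mirroring the encode step above
    f['data'].attrs['dimensions'] = [s.encode('utf-8') for s in ['reps', 'chan']]

with h5py.File('/tmp/dims_demo.hdf5', 'r') as f:
    dimlabels = [s.decode('utf-8') for s in f['data'].attrs['dimensions']]
    print(dimlabels)                #['reps', 'chan']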
Example #7
    ndest = hdftools.hdfPath(os.path.join("/Volumes", "PVH_DATA", "LAPD_Mar2018", "FULL", "run104_JanusBaO_density.hdf5"))
    tdest = hdftools.hdfPath(os.path.join("/Volumes", "PVH_DATA", "LAPD_Mar2018", "FULL", "run104_JanusBaO_temperature.hdf5"))

    #f = hdftools.hdfPath(os.path.join("F:", "LAPD_Mar2018", "RAW", "run104_JanusBaO_raw.hdf5"))
    #ndest = hdftools.hdfPath(os.path.join("F:", "LAPD_Mar2018", "FULL", "run104_JanusBaO_density.hdf5"))
    #tdest = hdftools.hdfPath(os.path.join("F:", "LAPD_Mar2018", "FULL", "run104_JanusBaO_temperature.hdf5"))
    resistor = 2.2

    vsweepLangmuirRawToFull(f, ndest, tdest, verbose=True, grid=True)
    """

    #f=  hdftools.hdfPath(os.path.join("/Volumes", "PVH_DATA", "LAPD_Mar2018", "RAW","run104_JanusLaB6_raw.hdf5"))
    f = hdftools.hdfPath(
        os.path.join("F:", "LAPD_Mar2018", "RAW", "run104_JanusLaB6_raw.hdf5"))

    #isatRawToFull(f, ndest, verbose=True, grid=True)

    with h5py.File(f.file, 'a') as sf:
        resistor = 2.2
        shot = 400
        reps = 5
        time = sf['time'][:] * 8
        current = sf['data'][shot:shot + reps, :,
                             0] / resistor  #2.2 Ohm resistor, convert to A
        voltage = sf['data'][shot:shot + reps, :,
                             1] * 100  #Attenuation was x100

        current = np.mean(current, axis=0)
        voltage = np.mean(voltage, axis=0)
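
The block above averages a few reps into one sweep: current through the 2.2 Ohm shunt resistor against the x100-attenuated bias voltage. Plotted against each other these trace out the Langmuir I-V characteristic; a sketch with synthetic stand-in data, not values from the run above:

import numpy as np
import matplotlib.pyplot as plt

#Synthetic stand-in for the averaged sweep computed above
voltage = np.linspace(-50, 50, 500)
current = 1e-3 * (np.exp(voltage / 10.0) - 1)
current = np.clip(current, None, 0.1)   #crude electron-saturation cap

plt.plot(voltage, current)
plt.xlabel('Probe voltage (V)')
plt.ylabel('Probe current (A)')
plt.title('Langmuir I-V characteristic (synthetic)')
plt.show()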
Example #8
File: lapd.py  Project: uclahedp/uclahedp


#Simple test program
if __name__ == "__main__":
    
    exp = 'LAPD_Jan2019'
    probe = 'LAPD_C6'
    run = 25

    
    #Windows
    #hdf_dir = os.path.join("F:", exp, "HDF")
    #csv_dir = os.path.join("F:", exp, "METADATA")
    #dest = hdftools.hdfPath(os.path.join("F:", exp, "RAW", 'run' + str(run) + '_' + probe + '_raw.hdf5'))

    #OSX
    hdf_dir = '/Volumes/PVH_DATA/' + exp + '/HDF/'
    csv_dir = '/Volumes/PVH_DATA/' + exp + '/METADATA/'
    dest = hdftools.hdfPath('/Volumes/PVH_DATA/' + exp + '/RAW/run' + str(run) + '_' + probe + '_raw.hdf5')
    
    
    #Delete the output file if it already exists
    try:
        os.remove(dest.file)
    except FileNotFoundError:
        pass

    print('reading')
    util.mem()
    tstart = util.timeTest()
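
util.mem() and util.timeTest() are uclahedp helpers not shown in this excerpt; from the usage they appear to report memory and wall-clock time around the read. A generic stand-in for the same timing pattern using only the standard library:

import time

tstart = time.time()
#... the read step being timed goes here ...
print('Elapsed: %.1f s' % (time.time() - tstart))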
Example #9
    ref = np.fft.ifft(ref_fft)

    #Compute the phase difference by multiplying by the conjugate
    sig = sig * np.conj(ref)
    #Separate out the phase
    phase = np.angle(sig)
    #Unwrap the phase (correct jumps of more than pi)
    phase = np.unwrap(phase)

    return phase


if __name__ == "__main__":

    sig_file = hdftools.hdfPath(
        os.path.join("F:", "LAPD_Mar2018", "RAW",
                     "run83_interferometer_signal.hdf5"))
    ref_file = hdftools.hdfPath(
        os.path.join("F:", "LAPD_Mar2018", "RAW",
                     "run83_interferometer_reference.hdf5"))
    dest_file = hdftools.hdfPath(
        os.path.join("F:", "LAPD_Mar2018", "FULL",
                     "run83_interferometer.hdf5"))

    with h5py.File(sig_file.file, 'r') as f:
        sig = f['data'][:, 0]
        time = f['time'][:]
        nti = time.shape[0]

    with h5py.File(ref_file.file, 'r') as f:
        ref = f['data'][0:nti, 0]
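
The conjugate-multiply/unwrap recipe above is easy to sanity-check on synthetic data: two tones with a known phase offset should recover that offset. A standalone sketch:

import numpy as np

t = np.linspace(0, 1e-3, 4096)
f0 = 40e3                           #toy carrier frequency, Hz
offset = 1.2                        #known phase offset, radians

ref = np.exp(1j * 2 * np.pi * f0 * t)
sig = np.exp(1j * (2 * np.pi * f0 * t + offset))

#Phase difference by multiplying by the conjugate, as above
phase = np.unwrap(np.angle(sig * np.conj(ref)))
print(phase.mean())                 #~1.2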
Example #10
def processMany(data_dir,
                runs=None,
                probes=None,
                overwrite_raw=True,
                overwrite_full=True,
                csv_dir=None,
                hdf_dir=None,
                rawsource=None,
                trange=[0, -1],
                use_tdiode='tdiode'):

    if csv_dir is None:
        csv_dir = os.path.join(data_dir, "METADATA")

    if hdf_dir is None:
        hdf_dir = os.path.join(data_dir, "HDF")

    if runs is None:
        runlist = csv.getRunList(csv_dir)
    else:
        runlist = runs

    for run in runlist:

        print("Processing run: " + str(run))

        #Load the probe list
        probelist = csv.getProbeList(csv_dir, run)
        #If a tdiode exists, bring it to the front of the list so it gets called first

        if (use_tdiode, 'tdiode') in probelist:
            #Pop the tdiode to the front of the list, since others depend on it
            probelist.insert(
                0, probelist.pop(probelist.index((use_tdiode, 'tdiode'))))

        if len(probelist) == 0:
            print("WARNING: NO PROBES FOUND FOR RUN " + str(run))

        for probe in probelist:
            tdiode_full = hdf.hdfPath(
                os.path.join(
                    data_dir, "FULL",
                    'run' + str(run) + '_' + use_tdiode + '_full.hdf5'))

            print(tdiode_full.file)
            file_exists = os.path.exists(tdiode_full.file)
            if file_exists:
                print("Using tdiode file: " + str(tdiode_full.file))
            else:
                print("NO TDIODE FOUND: WILL NOT CORRECT FOR TIMING!")
                tdiode_full = None

            #If the probes array is set but doesn't include this probe,
            #skip it
            if probes is not None and probe[0] not in probes:
                print("SKIPPING: " + probe[0] + ', NOT IN PROBE LIST!')
                continue
            print("Processing probe: " + str(probe))
            process(data_dir,
                    run,
                    probe,
                    overwrite_raw=overwrite_raw,
                    overwrite_full=overwrite_full,
                    csv_dir=csv_dir,
                    hdf_dir=hdf_dir,
                    tdiode_hdf=tdiode_full,
                    trange=trange,
                    rawsource=rawsource)
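
An illustrative driver call for processMany (directory and run numbers hypothetical): it derives the METADATA and HDF paths from data_dir, pulls the run list from the CSVs unless runs is given, and filters to the named probes:

import os

data_dir = os.path.join("/Volumes", "PVH_DATA", "LAPD_Jul2019")  #illustrative

processMany(data_dir,
            runs=[1, 2, 3],
            probes=['LAPD_C2'],
            overwrite_raw=False,
            overwrite_full=True)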
Example #11
def process(data_dir,
            run,
            probe,
            overwrite_raw=True,
            overwrite_full=True,
            csv_dir=None,
            hdf_dir=None,
            img_dir=None,
            tdiode_hdf=None,
            trange=None,
            rawsource=None):

    if csv_dir is None:
        csv_dir = os.path.join(data_dir, "METADATA")

    if hdf_dir is None:
        hdf_dir = os.path.join(data_dir, "HDF")

    if img_dir is None:
        img_dir = os.path.join(data_dir, "PIMAX")

    if rawsource is None:
        rawsource = 'LAPD'

    probe_string = 'run' + str(run) + '_' + probe[0]

    rawfile = hdf.hdfPath(
        os.path.join(data_dir, "RAW", probe_string + '_raw.hdf5'))
    fullfile = hdf.hdfPath(
        os.path.join(data_dir, "FULL", probe_string + '_full.hdf5'))

    #Make the directory if necessary
    os.makedirs(os.path.dirname(rawfile.file), exist_ok=True)
    os.makedirs(os.path.dirname(fullfile.file), exist_ok=True)

    #Make the raw file
    if os.path.exists(rawfile.file):
        if overwrite_raw:
            os.remove(rawfile.file)
    if not os.path.exists(rawfile.file):
        if rawsource == 'LAPD':
            print("Running lapdToRaw")
            rawfile = lapd.lapdToRaw(run,
                                     probe[0],
                                     hdf_dir,
                                     csv_dir,
                                     rawfile,
                                     trange=trange,
                                     verbose=True)
        if rawsource == 'HRR':
            print("Running hrrToRaw")
            rawfile = hrr.hrrToRaw(run,
                                   probe[0],
                                   hdf_dir,
                                   csv_dir,
                                   rawfile,
                                   verbose=True,
                                   debug=False)
        if rawsource == 'imgdir':
            print("Running ImgDirToRaw")
            rawfile = imgdir.imgDirToRaw(run,
                                         probe[0],
                                         img_dir,
                                         rawfile,
                                         csv_dir,
                                         verbose=True)

    else:
        print("Raw file exists: skipping")

    print(tdiode_hdf)
    #Make the full file
    if os.path.exists(fullfile.file):
        if overwrite_full:
            os.remove(fullfile.file)

    if not os.path.exists(fullfile.file):
        if probe[1] == 'tdiode':
            print("Running tdiodeRawToFull")
            fullfile = tdiode.tdiodeRawToFull(rawfile,
                                              fullfile,
                                              verbose=True,
                                              fatal_badshot_percentage=.2,
                                              badshotratio=10)
        elif probe[1] == 'bdot':
            print("Running bdotRawToFull")
            fullfile = bdot.bdotRawToFull(
                rawfile,
                fullfile,
                tdiode_hdf=tdiode_hdf,
                grid=True,
                calibrate=True,
                integrate=True,
                angle_correction=True,
                verbose=True,
                highfreq_calibrate=False,
                strict_axes=False,
                strict_grid=False,
                remove_offset=True,
                offset_range=(0, -50),
                offset_rel_t0=[False, True])  #grid_precision = 0.25,
        elif probe[1] == 'isat':
            print("Running isatRawToFull")
            fullfile = langmuir.isatRawToFull(rawfile,
                                              fullfile,
                                              tdiode_hdf=tdiode_hdf,
                                              verbose=True,
                                              grid=True,
                                              mu=1,
                                              ti=1)
        elif probe[1] == 'vsweep':
            print("Running vsweepRawToFull")
            nfile = hdf.hdfPath(
                os.path.join(data_dir, "FULL", probe_string + '_ne.hdf5'))
            tfile = hdf.hdfPath(
                os.path.join(data_dir, "FULL", probe_string + '_te.hdf5'))

            fullfile = langmuir.vsweepLangmuirRawToFull(rawfile,
                                                        nfile,
                                                        tfile,
                                                        verbose=True,
                                                        grid=True,
                                                        plots=True)

        elif probe[1] == 'scope':
            print("Running scopeRawToFull")
            fullfile = scope.scopeRawToFull(rawfile,
                                            fullfile,
                                            tdiode_hdf=tdiode_hdf,
                                            verbose=False,
                                            debug=False)

        elif probe[1] == 'camera':
            print("Running imgseqRawToFull")
            fullfile = imgseq.imgSeqRawToFull(rawfile, fullfile)

        else:
            print("NO MATCHING PROBE TYPE ROUTINE EXISTS: SKIPPING!")
    else:
        print("Full file exists: skipping")
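
process() dispatches on probe[1], so the probe argument must be a (name, type) tuple. An illustrative single-run call (paths and run number hypothetical):

import os

data_dir = os.path.join("/Volumes", "PVH_DATA", "LAPD_Jul2019")  #illustrative

#The tuple's second element selects the RawToFull routine above
process(data_dir, 34, ('LAPD_C2', 'bdot'),
        overwrite_raw=False,
        overwrite_full=True)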
Example #12
        #Create the axes
        grp.require_dataset(str(axis_name), (nelm, ), np.float32,
                            chunks=True)[:] = axis
        grp[str(axis_name)].attrs['unit'] = str(axis_unit)

        grp.require_dataset('chan', (nchan, ), np.float32,
                            chunks=True)[:] = np.arange(nchan)
        grp['chan'].attrs['unit'] = ''

    return dest


if __name__ == "__main__":
    src = hdftools.hdfPath(
        os.path.join("F:", "LAPD_Mar2018", "INTERFEROMETER",
                     "C1January 201700044.txt"))
    dest = hdftools.hdfPath(
        os.path.join("F:", "LAPD_Mar2018", "RAW",
                     "44_Interferometer_Probe.hdf5"))

    csv_dir = os.path.join("F:", "LAPD_Mar2018", "METADATA", "CSV")
    run = 84
    probe = 'TonyInterferometer'

    asciiToRaw(src,
               dest,
               skip_header=5,
               ax=0,
               axis_name='time',
               axis_unit='s',
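
asciiToRaw (its call is truncated above) evidently ingests a delimited scope trace, skipping a five-line header before the axis and data columns. A hypothetical sketch of just that read step with numpy, not the uclahedp implementation (comma delimiter and column layout assumed):

import numpy as np

#skip_header=5 mirrors the call above; the file name is from this example
arr = np.genfromtxt('C1January 201700044.txt', skip_header=5, delimiter=',')
time, signal = arr[:, 0], arr[:, 1]
print(time.shape, signal.shape)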