Example no. 1
# NOTE: this snippet assumes h5_og_path (the original file) and h5_path (the scratch
# copy) are defined above
import os
import time
import h5py
import pyUSID as usid
from shutil import copyfile
from bayesian_inference import AdaptiveBayesianInference

# work on a scratch copy so the original dataset is left untouched
copyfile(h5_og_path, h5_path)

M = 125
Ns = int(5e8)

with h5py.File(h5_path, mode='r+') as h5_f:

    h5_grp = h5_f['Measurement_000/Channel_000']
    f = usid.hdf_utils.get_attr(h5_grp, 'excitation_frequency_[Hz]')
    V0 = usid.hdf_utils.get_attr(h5_grp, 'excitation_amplitude_[V]')

    h5_resh = h5_f[
        'Measurement_000/Channel_000/Raw_Data-FFT_Filtering_000/Filtered_Data-Reshape_000/Reshaped_Data']

    # initialize the Process with the desired parameters
    abi = AdaptiveBayesianInference(h5_resh, f=f, V0=V0, Ns=Ns, M=M)

    # record the time it takes to compute one random pixel
    startTime = time.time()
    pixResultBoi = abi.test()
    totalTime = time.time() - startTime

# delete the copy of the dataset we created
os.remove(h5_path)

# record the time in a file pixel by pixel so we don't have to wait for all
# the pixels to finish (in case something crashes haha)
outputFile = open("timings.txt", "a")
outputFile.write("{}\n".format(totalTime))
outputFile.close()
Example no. 2
# Plot the subset results to check that the computation worked.

import h5py
from bayesian_inference import AdaptiveBayesianInference
from matplotlib import pyplot as plt
import numpy as np
import pyUSID as usid

h5_path = r"C:\Users\Administrator\Documents\29TSummer2019\subsetFile1565658168.7634015.h5"

with h5py.File(h5_path, mode="r+") as h5_f:
    h5_main = h5_f["subsetBoi/Measured Current"]
    h5_results = h5_f["subsetBoi/Measured Current-Adaptive_Bayesian_000"]

    abi = AdaptiveBayesianInference(h5_main)

    for i in range(h5_main.shape[0]):
        figBoi = abi.plotPixel(i, h5_results_grp=h5_results)
        figBoi.set_size_inches(30, 10)
        figBoi.show()
        input("Press <Enter> for next figure...")
Example no. 3
                                   "nA",
                                   None,
                                   None,
                                   dtype=np.float64,
                                   h5_pos_inds=h5_pos_inds,
                                   h5_pos_vals=h5_pos_vals,
                                   h5_spec_inds=h5_spec_inds,
                                   h5_spec_vals=h5_spec_vals)
    h5_subset[()] = h5_resh[()][pixelInds, :]
    #print("check if main returns: {}".format(usid.hdf_utils.check_if_main(h5_subset)))

# remove the temporarily created full copy
os.remove(h5_path)

# create a new Process object and run compute
abi = AdaptiveBayesianInference(h5_subset, f=f, V0=V0, Ns=Ns, M=M)
print("starting compute")
startTime = time.time()
h5_bayes_group = abi.compute()
totalTime = time.time() - startTime
print("compute ended")

# close and delete the temporary subset file
sub_f.close()
os.remove(sub_f_path)

# record the timing results
outputFile = open("parallelTimings.txt", "a")
outputFile.write("{}\n".format(totalTime))
outputFile.close()
Example no. 4
    # (snippet truncated above: assumes an open h5py.File handle h5_f, plus the
    # parameter sweeps NsVals and mVals and the timingResults array defined earlier)
    h5_grp = h5_f['Measurement_000/Channel_000']
    f = usid.hdf_utils.get_attr(h5_grp, 'excitation_frequency_[Hz]')
    V0 = usid.hdf_utils.get_attr(h5_grp, 'excitation_amplitude_[V]')

    h5_resh = h5_f[
        'Measurement_000/Channel_000/Raw_Data-FFT_Filtering_000/Filtered_Data-Reshape_000/Reshaped_Data']

    for i in range(len(NsVals)):
        for j in range(len(mVals)):
            print("Starting run with Ns = {} and M = {}".format(
                NsVals[i], mVals[j]))

            # initialize the Process with the desired parameters
            abi = AdaptiveBayesianInference(h5_resh,
                                            f=f,
                                            V0=V0,
                                            Ns=NsVals[i],
                                            M=mVals[j])

            # record the time it takes to compute one random pixel
            startTime = time.time()
            pix_ind, figBoi = abi.test(pix_ind=43763)
            totalTime = time.time() - startTime

            # store the time for graphing later
            timingResults[i][j] = totalTime

            # record the time in a file pixel by pixel so we don't have to wait for all
            # the pixels to finish (in case something crashes haha)
            outputFile = open("parameterTests/timings.txt", "a")
            outputFile.write(
Example no. 5
import h5py
#from mpi4py import MPI
from bayesian_inference import AdaptiveBayesianInference

h5_path = 'pzt_nanocap_6_split_bayesian_compensation_R_correction.h5'

with h5py.File(h5_path, mode='r+') as h5_f:

    h5_grp = h5_f['Measurement_000/Channel_000']

    h5_resh = h5_grp[
        'Raw_Data-FFT_Filtering_000/Filtered_Data-Reshape_000/Reshaped_Data']

    abi = AdaptiveBayesianInference(h5_resh, verbose=True)

    h5_bayes_grp = abi.compute()
Example no. 6
import h5py
import os
import matplotlib.pyplot as plt
#from mpi4py import MPI
from bayesian_inference import AdaptiveBayesianInference

h5_path = 'pzt_nanocap_6_split_bayesian_compensation_R_correction.h5'

with h5py.File(h5_path, mode='r+') as h5_f:

    h5_grp = h5_f['Measurement_000/Channel_000']

    h5_resh = h5_grp[
        'Raw_Data-FFT_Filtering_000/Filtered_Data-Reshape_000/Reshaped_Data']

    abi = AdaptiveBayesianInference(h5_resh, Ns=int(10), verbose=True)

    fig1, fig2 = abi.test()
    #plt.savefig("result0.png")
    #plt.savefig("result1.png")
Example no. 7
# (snippet truncated above: assumes h5_path, timingNames, timingNs, and timingM are
# defined earlier, along with imports of os, time, h5py, and pyUSID as usid)
with h5py.File(h5_path, mode='r+') as h5_f:

    h5_grp = h5_f['Measurement_000/Channel_000']
    f = usid.hdf_utils.get_attr(h5_grp, 'excitation_frequency_[Hz]')
    V0 = usid.hdf_utils.get_attr(h5_grp, 'excitation_amplitude_[V]')

    h5_resh = h5_f['Measurement_000/Channel_000/Raw_Data-FFT_Filtering_000/Filtered_Data-Reshape_000/Reshaped_Data']

    for i in range(len(timingNames)):
        print("Starting running stuff in {}".format(timingNames[i]))
        os.chdir(timingNames[i])
        from bayesian_inference import AdaptiveBayesianInference
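        # Note (assumption, not in the original): Python caches imported modules, so
        # this re-import alone will not pick up the copy of bayesian_inference that
        # lives in the directory we just chdir'd into. A minimal sketch of one way to
        # force that, assuming each timing directory holds its own copy of the module:
        #
        #     import sys
        #     sys.modules.pop('bayesian_inference', None)  # forget the cached module
        #     sys.path.insert(0, os.getcwd())              # make this directory importable
        #     from bayesian_inference import AdaptiveBayesianInference
        #     sys.path.pop(0)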
        
        # initialize the Process with the desired parameters
        abi = AdaptiveBayesianInference(h5_resh, f=f, V0=V0, Ns=timingNs[i], M=timingM[i])
        
        for j in range(10):
            # record the time it takes to compute one random pixel
            startTime = time.time()
            pixResultBoi = abi.test()
            totalTime = time.time() - startTime

            # record the time in a file pixel by pixel so we don't have to wait for all
            # the pixels to finish (in case something crashes haha)
            outputFile = open("VMtimings.txt", "a")
            outputFile.write("{}\n".format(totalTime))
            outputFile.close()

        del AdaptiveBayesianInference
        print("Finished running stuff in {}".format(timingNames[i]))
Example no. 8
import h5py
import time
from bayesian_inference import AdaptiveBayesianInference
from matplotlib import pyplot as plt
import pyUSID as usid
import numpy as np

h5_path = r"C:\Users\Administrator\Dropbox\GIv Bayesian June 2019\pzt_nanocap_6_split_bayesian_compensation_R_correction.h5"

with h5py.File(h5_path, mode='r+') as h5_f:

    h5_grp = h5_f['Measurement_000/Channel_000']
    f = usid.hdf_utils.get_attr(h5_grp, 'excitation_frequency_[Hz]')
    V0 = usid.hdf_utils.get_attr(h5_grp, 'excitation_amplitude_[V]')

    h5_resh = h5_f['Measurement_000/Channel_000/Raw_Data-FFT_Filtering_000/Filtered_Data-Reshape_000/Reshaped_Data']

    abi = AdaptiveBayesianInference(h5_resh, f=f, V0=V0, Ns=int(1e8))

    figFor, figRev = abi.test(pix_ind=27935)

    figFor.set_size_inches(25, 10)
    figRev.set_size_inches(25, 10)

    figFor.savefig("27935forward1e8.png")
    figRev.savefig("27935reverse1e8.png")

    #figFor.show()
    #figRev.show()

    #breakpoint()

h5_path = r"C:\Users\Administrator\Dropbox\GIv Bayesian June 2019\pzt_nanocap_6_split_bayesian_compensation_R_correction (Alvin Tan's conflicted copy 2019-06-25).h5"

with h5py.File(h5_path, mode='r+') as h5_f:

    h5_grp = h5_f['Measurement_000/Channel_000']
    f = usid.hdf_utils.get_attr(h5_grp, 'excitation_frequency_[Hz]')
    V0 = usid.hdf_utils.get_attr(h5_grp, 'excitation_amplitude_[V]')

    h5_resh = h5_f[
        'Measurement_000/Channel_000/Raw_Data-FFT_Filtering_000/Filtered_Data-Reshape_000/Reshaped_Data']

    Ns = int(1e7)
    M = 24

    abi = AdaptiveBayesianInference(h5_resh, f=f, V0=V0, Ns=Ns, M=M)

    pos_in_batch = [0, 138, 3994, 27935]

    start_time = time.time()

    # lol bad coding practices for the win: call the Process's internal steps
    # directly so we can time a hand-picked batch of pixels
    abi._create_results_datasets()
    abi._unit_computation(pos_in_batch=pos_in_batch)
    abi._write_results_chunk(pos_in_batch=pos_in_batch)

    print(
        "Processing {} pixels with Ns = {} and M = {} took {} seconds".format(
            len(pos_in_batch), Ns, M,
            time.time() - start_time))
Example no. 10
    # (snippet truncated above: assumes an open h5py.File context with h5_grp
    # pointing at 'Measurement_000/Channel_000', plus imports of time and h5py)
    h5_resh = h5_grp[
        'Raw_Data-FFT_Filtering_000/Filtered_Data-Reshape_000/Reshaped_Data']

    # Each 'X' axis has 256 images, so [0, 1] covers 512 images
    myDict = {}
    #myDict['X'] = [0,1]
    '''
    # Pseudorandomly select two rows to process
    randNum1 = random.randrange(266)
    randNum2 = random.randrange(266)
    myDict['X'] = [randNum1, randNum2]

    print("Running compute on rows {} and {}".format(randNum1, randNum2))
    '''
    # Creates object from original data
    abi = AdaptiveBayesianInference(h5_resh)
    '''
    #slice data
    subset = abi.h5_main.slice_to_dataset(slice_dict=myDict)
    #create object from sliced data
    abi_subset = AdaptiveBayesianInference(subset, f=f, V0=V0, Ns=Ns, M=M)
    '''
    #compute
    startTime = time.time()
    #h5_bayes_group = abi_subset.test()
    h5_bayes_group = abi.compute()
    totalTime = time.time() - startTime

    #copy result group to new h5 file
    with h5py.File('bayesian_results{}.h5'.format(time.time()), 'a') as f:
        f.create_group("results0")
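        # (Assumption, not in the original: the actual copy is not shown above and
        # presumably follows. A minimal sketch using h5py's Group.copy, which can
        # copy objects across files, would be:)
        #
        #     f.copy(h5_bayes_group, f["results0"])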