Example #1
def plot_polar(phi, r, ax=None):
    """Plot (phi, r) on a half-polar axis and shade the regions defined by the
    configured target value and the optional cold/warm thresholds."""
    y_r = config['y_range']
    y_th0 = config['y_thresholds'][0]
    y_th1 = config['y_thresholds'][1]
    y_target_norm = analysis.normalize(config['y_target'], y_r)
    y_th0_norm = analysis.normalize(y_th0, y_r)
    y_th1_norm = analysis.normalize(y_th1, y_r)
    phi_target = np.arccos(y_target_norm)
    phi_th0 = np.arccos(y_th0_norm)
    phi_th1 = np.arccos(y_th1_norm)

    if ax is None:
        fig = plt.figure(figsize=(15, 15))
        ax = fig.add_subplot(111, polar=True)
    ax.set_thetamin(0)
    ax.set_thetamax(180)
    ax.set_xticks(np.pi / 180. * np.linspace(180, 0, 4, endpoint=False))
    ax.fill_betweenx(r,
                     np.pi - phi_target,
                     np.pi - phi,
                     color=config['color_standard'],
                     alpha=0.2)
    ax.plot([np.pi - phi_target, np.pi - phi_target], [0, 1],
            dashes=(2, 3),
            c=config['color_default'])

    if y_th0:
        phi_cold = np.where(phi > phi_th0, phi, phi_th0)
        ax.fill_betweenx(r,
                         np.pi - phi_th0,
                         np.pi - phi_cold,
                         color=config['color_cold'],
                         alpha=0.2)
        ax.plot([np.pi - phi_th0, np.pi - phi_th0], [0, 1],
                dashes=(4, 4),
                c=config['color_cold'])
    if y_th1:
        phi_warm = np.where(phi < phi_th1, phi, phi_th1)
        ax.fill_betweenx(r,
                         np.pi - phi_th1,
                         np.pi - phi_warm,
                         color=config['color_warm'],
                         alpha=0.2)
        ax.plot([np.pi - phi_th1, np.pi - phi_th1], [0, 1],
                dashes=(4, 4),
                c=config['color_warm'])

    ax.plot(np.pi - phi, r, c=config['color_default'])

    return ax
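A minimal usage sketch, assuming the module-level config dictionary and the analysis.normalize helper that plot_polar relies on are already defined; the phi/r values below are purely illustrative:

import numpy as np
import matplotlib.pyplot as plt

# illustrative curve: radii in [0, 1] with matching angles in radians
r = np.linspace(0.0, 1.0, 200)
phi = np.arccos(np.linspace(-1.0, 1.0, 200))

ax = plot_polar(phi, r)   # builds its own half-polar axes when ax is None
plt.show()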
Example #2
def analyze():
    """Accept a raw measurement, normalize its curve, classify the coin and
    cache the result in the module-level last_coin dict."""
    raw = request.get_data()
    curve = raw_to_dataframe(raw)
    curve = normalize(curve)
    last_coin['curve'] = curve
    last_coin['datetime'] = datetime.today().isoformat()
    coin = classify_coin(curve)
    last_coin['coin'] = coin
    if last_coin['coin'] == -1:
        last_coin['coin'] = 'unknown'
    return 'ok'
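The call to request.get_data() suggests a Flask-style request object. A minimal wiring sketch under that assumption; the route path and the shape of last_coin are illustrative, and raw_to_dataframe, normalize and classify_coin are taken to be the helpers from the surrounding module:

from flask import Flask, request   # assumed framework, based on request.get_data()

app = Flask(__name__)
last_coin = {}                     # module-level cache written by analyze()

# hypothetical endpoint: POST raw curve data to trigger classification
app.add_url_rule('/analyze', view_func=analyze, methods=['POST'])

# after a request, the latest result can be read back from the cache:
# last_coin['coin'], last_coin['curve'], last_coin['datetime']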
Example #3
# print(np.shape(test_V_arr_for_combo))
#==============================================================================================================
# Process Base and Mixture Data
#==============================================================================================================

# dim(trace_V_arr) = (3, 1000, 2300): (num base odors, num neurons in network, num timesteps after start = 100 ms)
# dim(X) = (6900, 1000): (num base odors * num timesteps after start, num neurons in network)
X = np.hstack(trace_V_arr).T

#normalize training (base odor) data
mini = np.min(X)  # lowest voltage of any neuron at any time, across all base odors
maxi = np.max(X)  # highest voltage of any neuron at any time, across all base odors
normalized_Training_Voltages = anal.normalize(X, mini, maxi)  # 0 at the minimum, 1 at the maximum

point_Labels = np.hstack(label_arr)

# Train the SVM on base odors and labels
clf = anal.learnSVM(normalized_Training_Voltages, point_Labels)

for rowNum, aCombo in enumerate(baseNum):

    # load, ignoring four of the outputs
    _,_,_,test_V_arr_for_combo[rowNum],_,label_test_arr_for_combo[rowNum] \
        = anal.load_data(te_prefix+'combo_'+str(rowNum)+"_", num_runs = number_Of_Mix_Runs)

    test_data = test_V_arr_for_combo[rowNum]  # mixed-odor voltages for this combo
    test_data = anal.normalize(test_data, mini, maxi)  # scale with the training min/max
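    # --- hedged continuation sketch, not part of the original snippet ---
    # assumed next step, mirroring the prediction loop in Example #4: classify
    # each mixture run of this combo with the SVM trained on the base odors
    for run in test_data:
        mix_pred = clf.predict(run.T)  # one predicted base-odor label per timestep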
Example #4
# run the simulation and save to disk
ex.createData(run_params_train, I_arr, states, net)
ex.mixtures2(run_params_test, I_arr[:num_odors_mix], states, net)

# load in the data from disk
spikes_t_arr, spikes_i_arr, I_arr, trace_V_arr, trace_t_arr, label_arr = \
    anal.load_data(tr_prefix, num_runs=num_odors_train)
spikes_t_test_arr, spikes_i_test_arr, I_test_arr, test_V_arr, test_t_arr, label_test_arr = \
    anal.load_data(te_prefix, num_runs=num_alpha * num_test)

X = np.hstack(trace_V_arr).T

# normalize training data
mini = np.min(X)
maxi = np.max(X)
X = anal.normalize(X, mini, maxi)

y = np.hstack(label_arr)

# train the SVM
clf = anal.learnSVM(X, y)

test_data = test_V_arr
test_data = anal.normalize(test_data, mini, maxi)

y_test = np.mean(label_test_arr, axis=1)

pred_arr = []
A_arr = []
for i in range(len(test_data)):
    pred = clf.predict(test_data[i].T)
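    # --- hedged continuation sketch, not part of the original snippet ---
    # assumed: keep this run's per-timestep predictions for later inspection
    pred_arr.append(pred)

# assumed summary step: report the most frequent predicted label per test run
# alongside the mean true label per run computed above (y_test)
pred_modes = [np.bincount(p.astype(int)).argmax() for p in pred_arr]
print(pred_modes)
print(y_test)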
Example #5
def count_vessels(probesizemicron, theta=[0]):
    # algorithm pseudocode:
    # normalize image slice
    # rotate probe
    # slice rotated image to generate line profiles
    # low pass filter line profiles to remove noise
    # find peaks of line profile (local maxima)
    # discard peaks below an empirical threshold
    # count the number of peaks --> number of crossed vessels at each location
    # no need to adjust the discard threshold with depth
    # repeat above for all substacks in the whole imaging volume
    # p.s. tested on 50um substacks
    thresh = -0.6
    # create sub-z tiff stack
    subzstack = subz_from_3dtiff(tif3darray, 50)
    print(tif3darray.shape)
    print(subzstack.shape)

    # make mask of probe cross-section
    probesizepix = micron2pix(probesizemicron)
    l = max(probesizepix)
    k = np.zeros((l, l))
    k[l // 2] = 1  # horizontal probe: a single bright row through the mask centre
    damage2d = np.zeros(
        (len(theta), (subzstack.shape[1] - l) // downsample + 1,
         (subzstack.shape[2] - l) // downsample + 1))
    # last two dims follow the y/x loops below (shape[2] gives the x extent)
    lineprofiles = np.zeros((subzstack.shape[0], len(theta),
                             (subzstack.shape[1] - l) // downsample + 1,
                             (subzstack.shape[2] - l) // downsample + 1),
                            dtype=object)
    print(damage2d.shape)
    for lnum, layer in enumerate(subzstack):
        print('layer ' + str(lnum + 1) + ' of ' + str(subzstack.shape[0]))
        layer = dsp.normalize(layer)
        for i, t in enumerate(theta):
            print('%s degrees, all translations...' % t)
            rotated = misc.imrotate(k, t, interp='nearest')
            # get rid of rotation artifacts
            rotated[rotated < rotated.max()] = 0
            rotated[rotated == rotated.max()] = 1
            # for y in range(layer.shape[0]+l): # add step here...
            ys = 0
            for y in range(0, layer.shape[0] - l, downsample):
                xs = 0
                # for x in range(layer.shape[1]-l): # add step here...
                for x in range(0, layer.shape[1] - l, downsample):
                    a = rotated * layer[y:y + l, x:x + l]
                    b = a.T[a.T.nonzero()]
                    # luminance estimate along the probe footprint (candidate vessel collisions)

                    # profl = b
                    profl = dsp.smoothg(b, 20)  # smoothed line profile for this location
                    # if b.mean() > .5:
                    # count excursions above 1.5 (saturated peaks), then clip them
                    c = profl > 1.5
                    damage2d[i, ys, xs] += np.diff(c).nonzero()[0][::2].size
                    profl[profl > 1.5] = 1.5

                    profl[profl < thresh] = thresh  # discard small peaks

                    lineprofiles[lnum, i, ys, xs] = profl

                    # count peaks --> vessels
                    # damage2d[i,y,x] += sum(dsp.islocmax(profl))
                    damage2d[i, ys, xs] += argrelmax(profl, order=5)[0].size
                    # damage2d[i, ys, xs] += len(find_events(profl, thresh))
                    # damage2d[i, ys, xs] += len(find_peaks_cwt(b, np.arange(5, 20)))
                    xs += 1
                ys += 1

    return damage2d, lineprofiles, subzstack
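A hedged usage sketch for count_vessels, assuming the module-level names it relies on (tif3darray, downsample, micron2pix, subz_from_3dtiff, dsp, argrelmax) are set up as in the surrounding module, and assuming the probe size is passed as a (width, length) pair in microns; all values are illustrative:

probe_size_um = (15, 50)   # assumed format: probe cross-section in microns
damage2d, lineprofiles, subzstack = count_vessels(probe_size_um, theta=[0, 45, 90])

# damage2d[i, y, x] estimates the number of vessels crossed at angle theta[i]
# and grid position (y, x); the minimum suggests the least damaging placement
best = np.unravel_index(np.argmin(damage2d), damage2d.shape)
print('least damaging (angle index, y, x):', best)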