Example #1
def run(nogui):
    
    reader = NML2Reader(verbose=True)

    filename = 'test_files/NML2_SingleCompHHCell.nml'
    print('Loading: %s'%filename)
    reader.read(filename, symmetric=True)
    
    
    msoma = reader.getComp(reader.doc.networks[0].populations[0].id,0,0)
    print(msoma)
    
    
    data = moose.Neutral('/data')
    
    pg = reader.getInput('pulseGen1')
    
    inj = moose.Table('%s/pulse' % (data.path))
    moose.connect(inj, 'requestOut', pg, 'getOutputValue')
    
    
    vm = moose.Table('%s/Vm' % (data.path))
    moose.connect(vm, 'requestOut', msoma, 'getVm')
    
    simdt = 1e-6
    plotdt = 1e-4
    simtime = 300e-3
    #moose.showmsg( '/clock' )
    for i in range(8):
        moose.setClock( i, simdt )
    moose.setClock( 8, plotdt )
    moose.reinit()
    moose.start(simtime)
    
    print("Finished simulation!")
    
    t = np.linspace(0, simtime, len(vm.vector))
    
    if not nogui:
        import matplotlib.pyplot as plt

        vfile = open('moose_v_hh.dat','w')

        for i in range(len(t)):
            vfile.write('%s\t%s\n'%(t[i],vm.vector[i]))
        vfile.close()
        plt.subplot(211)
        plt.plot(t, vm.vector * 1e3, label='Vm (mV)')
        plt.legend()
        plt.title('Vm')
        plt.subplot(212)
        plt.title('Input')
        plt.plot(t, inj.vector * 1e9, label='injected (nA)')
        #plt.plot(t, gK.vector * 1e6, label='K')
        #plt.plot(t, gNa.vector * 1e6, label='Na')
        plt.legend()
        plt.figure()
        test_channel_gates()
        plt.show()
        plt.close()
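The snippet above defines run() but its command-line entry point is not shown; a minimal sketch of one possible driver (the '-nogui' flag name is an assumption, not part of the original):

import sys

if __name__ == '__main__':
    # Run headless when '-nogui' is passed, otherwise show the matplotlib figures.
    run(nogui=('-nogui' in sys.argv))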
Example #2
def main():
    gw = gridworld()
    a = agent(gw)

    for epoch in range(20):
        a.initEpoch()
        while True:
            rwd, stat, act = a.takeAction()
            a.updateQ(rwd, stat, act)
            if gw.status() == 'Goal':
                break
            if mod(a.counter, 10)==0:
                print(gw.state())
                print(gw.field())
        print('Finished')
        print(a.counter)
        print(gw.state())
        print(gw.field())
        Q = transpose(a.Q(), (2,0,1))
        for i in range(4):
            plt.subplot(2, 2, i + 1)  # subplot indices start at 1
            plt.imshow(Q[i], interpolation='nearest')
            plt.title(a.actions()[i])
            plt.colorbar()
        plt.show()
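This example relies on gridworld and agent classes, plus mod and transpose, that are defined or imported elsewhere; a plausible header for it (an assumption, not the original imports) would be:

from numpy import mod, transpose   # mod/transpose behave as used above
import matplotlib.pyplot as plt

# gridworld is assumed to expose status()/state()/field(), and agent to expose
# initEpoch()/takeAction()/updateQ()/Q()/actions(), matching the calls above.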
Example #3
    def render(self, interval=50, **kwargs):
        import matplotlib.cm as cm
        import matplotlib.animation as animation
        import matplotlib.pyplot as plt

        p = self.plot_layout
        _axs = []
        for i in range(self.image_list[0].shape[2]):
            plt.subplot(p[0], p[1], 1 + i)
            # Hide the x and y labels
            plt.axis('off')
            _ax = plt.imshow(self.image_list[0][:, :, i], cmap=cm.Greys_r,
                             **kwargs)
            _axs.append(_ax)

        def init():
            return _axs

        def animate(j):
            for k, _ax in enumerate(_axs):
                _ax.set_data(self.image_list[j][:, :, k])
            return _axs

        self._ani = animation.FuncAnimation(self.figure, animate,
                                            init_func=init,
                                            frames=len(self.image_list),
                                            interval=interval, blit=True)
        return self
Example #4
def test_tripcolor():
    x = np.asarray([0, 0.5, 1, 0,   0.5, 1,   0, 0.5, 1, 0.75])
    y = np.asarray([0, 0,   0, 0.5, 0.5, 0.5, 1, 1,   1, 0.75])
    triangles = np.asarray([
        [0, 1, 3], [1, 4, 3],
        [1, 2, 4], [2, 5, 4],
        [3, 4, 6], [4, 7, 6],
        [4, 5, 9], [7, 4, 9], [8, 7, 9], [5, 8, 9]])

    # Triangulation with same number of points and triangles.
    triang = mtri.Triangulation(x, y, triangles)

    Cpoints = x + 0.5*y

    xmid = x[triang.triangles].mean(axis=1)
    ymid = y[triang.triangles].mean(axis=1)
    Cfaces = 0.5*xmid + ymid

    plt.subplot(121)
    plt.tripcolor(triang, Cpoints, edgecolors='k')
    plt.title('point colors')

    plt.subplot(122)
    plt.tripcolor(triang, facecolors=Cfaces, edgecolors='k')
    plt.title('facecolors')
Example #5
def plot_data(kx,omega,F,F_R,F_L,K,O):
    #plt.figure(4)
    #plt.imshow(K,extent=[omega[0],omega[-1],kx[0],kx[-1]],\
    #        interpolation = "nearest", aspect = "auto")
    #plt.xlabel('KX')
    #plt.colorbar()
    
    #plt.figure(5)
    #plt.imshow(O,extent =[omega[0],omega[-1],kx[0],kx[-1]],interpolation="nearest", aspect="auto")
    #plt.xlabel('omega')
    #plt.colorbar()
    
    plt.figure(6)
    plt.subplot(1,2,1)
    plt.imshow(abs(F_R), extent= [omega[0],omega[-1],kx[0],kx[-1]], interpolation= "nearest", aspect = "auto")
    plt.xlabel('abs FFT_R')
    plt.colorbar()
    plt.subplot(1,2,2)
    plt.imshow(abs(F_L), extent= [omega[0],omega[-1],kx[0],kx[-1]], interpolation= "nearest", aspect = "auto")
    plt.xlabel('abs FFT_L')
    plt.colorbar()
    
    
    plt.figure(7)
    plt.subplot(2,1,1)
    plt.imshow(abs(F_L+F_R),extent=[omega[0],omega[-1],kx[0],kx[-1]],interpolation= "nearest", aspect = "auto")
    plt.xlabel('abs(F_L+F_R)  reconstructed')
    plt.colorbar()
    plt.subplot(2,1,2)
    plt.imshow(abs(F),extent=[omega[0],omega[-1],kx[0],kx[-1]],interpolation ="nearest",aspect = "auto")
    plt.xlabel('FFT of the original data')
    plt.colorbar()

    #plt.show()
    return
Example #6
def plot_wav_fft(wav_filename, desc=None):
    plt.clf()
    plt.figure(num=None, figsize=(6, 4))
    sample_rate, X = scipy.io.wavfile.read(wav_filename)
    spectrum = np.fft.fft(X)
    freq = np.fft.fftfreq(len(X), 1.0 / sample_rate)

    plt.subplot(211)
    num_samples = 200.0
    plt.xlim(0, num_samples / sample_rate)
    plt.xlabel("time [s]")
    plt.title(desc or wav_filename)
    plt.plot(np.arange(num_samples) / sample_rate, X[:int(num_samples)])  # slice indices must be integers
    plt.grid(True)

    plt.subplot(212)
    plt.xlim(0, 5000)
    plt.xlabel("frequency [Hz]")
    plt.xticks(np.arange(5) * 1000)
    if desc:
        desc = desc.strip()
        fft_desc = desc[0].lower() + desc[1:]
    else:
        fft_desc = wav_filename
    plt.title("FFT of %s" % fft_desc)
    plt.plot(freq, abs(spectrum), linewidth=5)
    plt.grid(True)

    plt.tight_layout()

    rel_filename = os.path.split(wav_filename)[1]
    plt.savefig("%s_wav_fft.png" % os.path.splitext(rel_filename)[0],
                bbox_inches='tight')
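A possible way to call plot_wav_fft, together with the imports the function assumes (the wav file name and description below are illustrative only):

import os
import numpy as np
import scipy.io.wavfile
import matplotlib.pyplot as plt

plot_wav_fft("example.wav", desc="400 Hz sine wave")  # hypothetical input file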
Example #7
File: plotter.py  Project: de-git/ml
def plot_feature_comparison(title, trajectory, features):
    plotCount = features.shape[1]
    pointCount = features.shape[0]
    fig = plt.figure()
    plt.title(title)
    #plt.ion()
    #plt.show()
    max = np.amax(features)
    min = np.amin(features)
    print "min=" + str(min) + ", max=" + str(max)
    for i in range(plotCount):
        plt.subplot(plotCount // 2, 2, 1 + i)  # integer row count for the subplot grid
        f = features[:,i]
        
        for k in range(pointCount):
            color = ''
            if f[k] > max * 0.6:
                color = 'r'
            elif f[k] > max * 0.3:
                color = 'y'
            elif f[k] < min * 0.6:   # check the stricter (more negative) bound first,
                color = 'g'          # otherwise this branch can never be reached
            elif f[k] < min * 0.3:
                color = 'b'
                
            if (color != ''):
                plt.plot(trajectory[k,0], trajectory[k,1], color+'.', markersize=20)
                #plt.draw()
        plt.plot(trajectory[:,0], trajectory[:,1], 'k')
    plt.show()
    #raw_input()
    return
Example #8
def simulate():
    # Plotting the PDF of Unif(0, 1)
    pyplot.subplot(211)
    x = np.linspace(stats.uniform.ppf(0), stats.uniform.ppf(1), 100)
    pyplot.title('PDF of Unif(0, 1)')
    pyplot.plot(x, stats.uniform.pdf(x))

    print("Xn is for n=1000: ", get_xn(1000))
    E_Xn = 0.5  # As we know, E(Xn) is equal to mu which is 0.5
    print("E(Xn) is : ", E_Xn)

    n = np.linspace(1, 1000, 1000)
    X_ns = []
    for i in range(1, 1001):
        X_ns.append(get_xn(i))
    pyplot.subplot(212)
    pyplot.title('f(n,Xn)')
    pyplot.plot(n, X_ns, '-g')

    print("Xn for n=1", get_xn(1))
    print("Xn for n=5", get_xn(5))
    print("Xn for n=25", get_xn(25))
    print("Xn for n=100", get_xn(100))

    pyplot.show()
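get_xn is not shown in this example; from the context (E(Xn) = mu = 0.5 for Unif(0, 1)) it is presumably the sample mean of n uniform draws, for which a minimal sketch is:

import numpy as np

def get_xn(n):
    # Mean of n independent Unif(0, 1) samples; approaches 0.5 as n grows.
    return np.random.uniform(0.0, 1.0, size=n).mean()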
Example #9
def test_clipping():
    exterior = mpath.Path.unit_rectangle().deepcopy()
    exterior.vertices *= 4
    exterior.vertices -= 2
    interior = mpath.Path.unit_circle().deepcopy()
    interior.vertices = interior.vertices[::-1]
    clip_path = mpath.Path(vertices=np.concatenate([exterior.vertices,
                                                    interior.vertices]),
                           codes=np.concatenate([exterior.codes,
                                                 interior.codes]))

    star = mpath.Path.unit_regular_star(6).deepcopy()
    star.vertices *= 2.6

    ax1 = plt.subplot(121)
    col = mcollections.PathCollection([star], lw=5, edgecolor='blue',
                                      facecolor='red', alpha=0.7, hatch='*')
    col.set_clip_path(clip_path, ax1.transData)
    ax1.add_collection(col)

    ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
    patch = mpatches.PathPatch(star, lw=5, edgecolor='blue', facecolor='red',
                               alpha=0.7, hatch='*')
    patch.set_clip_path(clip_path, ax2.transData)
    ax2.add_patch(patch)

    ax1.set_xlim([-3, 3])
    ax1.set_ylim([-3, 3])
Example #10
def view_punch(params, fmt):
    dates, UdtConnSucRate, UdpBroConnSucRate, PunchHoleSucRate, TotalPunchHoleSucRate = get_punch(['Time', 'UdtConnSucRate', 'UdpBroConnSucRate', 'PunchHoleSucRate', 'TotalPunchHoleSucRate'], params)
    dates = [parser.parse(d) for d in dates]

    UdtConnSucRate = [float(v)*100 for v in UdtConnSucRate]
    UdpBroConnSucRate = [float(v)*100 for v in UdpBroConnSucRate]
    PunchHoleSucRate = [float(v)*100 for v in PunchHoleSucRate]
    TotalPunchHoleSucRate = [float(v)*100 for v in TotalPunchHoleSucRate]

    ax = plt.subplot(411)
    ax.set_ylim([0, 100])
    plt.ylabel("UDT反连(%)")
    plt.plot_date(dates, UdpBroConnSucRate, fmt, label=params['isp'])
    plt.legend(loc='upper left', ncol=2)

    ax = plt.subplot(412)
    ax.set_ylim([0, 100])
    plt.ylabel("UDT直连(%)")
    plt.plot_date(dates, UdtConnSucRate, fmt, label=params['isp'])

    ax = plt.subplot(414)
    ax.set_ylim([0, 100])
    plt.ylabel("内网穿透(%)")
    plt.plot_date(dates, PunchHoleSucRate, fmt, label=params['isp'])

Example #11
def plot_images(data_list, data_shape="auto", fig_shape="auto"):
    """
    plotting data on current plt object.
    In default,data_shape and fig_shape are auto.
    It means considered the data as a sqare structure.
    """
    n_data = len(data_list)
    if data_shape == "auto":
        sqr = int(n_data ** 0.5)
        if sqr * sqr != n_data:
            data_shape = (sqr + 1, sqr + 1)
        else:
            data_shape = (sqr, sqr)
    plt.figure(figsize=data_shape)

    for i, data in enumerate(data_list):
        plt.subplot(data_shape[0], data_shape[1], i + 1)
        plt.gray()
        if fig_shape == "auto":
            fig_size = int(len(data) ** 0.5)
            if fig_size ** 2 != len(data):
                fig_shape = (fig_size + 1, fig_size + 1)
            else:
                fig_shape = (fig_size, fig_size)
        Z = data.reshape(fig_shape[0], fig_shape[1])
        plt.imshow(Z, interpolation="nearest")
        plt.tick_params(labelleft=False, labelbottom=False)
        plt.tick_params(axis="both", which="both", left=False, bottom=False, right=False, top=False)
        plt.subplots_adjust(hspace=0.05)
        plt.subplots_adjust(wspace=0.05)
Example #12
def plot_profiles(temp, salt, pres, filename, savedir):
    """
    Plot temperature and salinity profiles against pressure.
    INPUT:
        temp, salt, pres: profile arrays read from "filename"
    OUTPUT:
        graphs are generated and saved at "savedir"
    """
    plt.figure(figsize=(10,5))
    
    plt.subplot(121)
    plt.plot(temp, -pres, color='#ef6548', linewidth=2)
    plt.title('Temperature')

    plt.subplot(122)
    plt.plot(salt, -pres, color='#238b45', linewidth=2)
    plt.title('Salinity')

    if sys.platform == 'win32' or sys.platform == 'win64':
        figname = filename.split('\\')[-1].split('.')[0] + '.png'
    else:
        figname = filename.split('/')[-1].split('.')[0] + '.png'

    figname = os.path.join(savedir, figname)
    print "    --> saving png"
    plt.savefig(figname)
Example #13
def template_matching():
    img = cv2.imread('messi.jpg',0)
    img2 = img.copy()
    template = cv2.imread('face.png',0)
    w, h = template.shape[::-1]

    # All the 6 methods for comparison in a list
    methods = ['cv2.TM_CCOEFF', 'cv2.TM_CCOEFF_NORMED', 'cv2.TM_CCORR',
            'cv2.TM_CCORR_NORMED', 'cv2.TM_SQDIFF', 'cv2.TM_SQDIFF_NORMED']

    for meth in methods:
        img = img2.copy()
        method = eval(meth)

        # Apply template Matching
        res = cv2.matchTemplate(img,template,method)
        min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)

        # If the method is TM_SQDIFF or TM_SQDIFF_NORMED, take minimum
        if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]:
            top_left = min_loc
        else:
            top_left = max_loc
        bottom_right = (top_left[0] + w, top_left[1] + h)

        cv2.rectangle(img,top_left, bottom_right, 255, 2)

        plt.subplot(121),plt.imshow(res,cmap = 'gray')
        plt.title('Matching Result'), plt.xticks([]), plt.yticks([])
        plt.subplot(122),plt.imshow(img,cmap = 'gray')
        plt.title('Detected Point'), plt.xticks([]), plt.yticks([])
        plt.suptitle(meth)

        plt.show()
Example #14
def makeFftFig():
    plt.figure(3)
    plt.subplot(211)
    plt.plot(W,f_signal,'bo')
    plt.subplot(212)
    plt.plot(W,cut_f_signal,'bo')
    plt.show()
Example #15
def plot_logpdf_Gaussian():
    y2 = -6
    logvar = np.arange(-4,4,0.05)
    logprior = np.array([lp_gaussian(lv, 0, 1) for lv in logvar])
    def plot_logvar(mu):
        lp = np.array([lp_gaussian(y2, mu, np.exp(lv)) for lv in logvar])
        plt.plot(logvar, lp+logprior, label='mu = {}'.format(mu))
        plt.ylim(-20,3)
    plt.clf()
    plt.subplot(2,1,1)
    plt.plot(logvar, logprior, label='prior', lw=5)
    plot_logvar(0)
    plot_logvar(-6)
    plt.xlabel('logvar')
    plt.legend()
    mu = np.arange(-8,8,0.05)
    logprior = np.array([lp_gaussian(m,0,1) for m in mu])
    def plot_mu(var):
        lp = np.array([lp_gaussian(y2, m, var) for m in mu])
        lp = lp + logprior
        plt.plot(mu, lp, label='var = {}'.format(var))
        plt.ylim(-30,0)
    plt.subplot(2,1,2)
    plt.plot(mu, logprior, label='prior', lw=5)
    plot_mu(0.1)
    plot_mu(1.0)
    plot_mu(10)
    plt.xlabel('mu')
    plt.legend()
Example #16
def draw(ord_l, gaps):

    axScatter = plt.subplot(3, 1, 1)

    number_samples = 0  # with 0, the [-number_samples:] slices below keep every sample
    # axScatter.scatter([i['seq'] for i in ord_l[-number_samples:]], [i['a'] for i in ord_l[-number_samples:]], s=2, color='r', label='ch1')
    axScatter.scatter([i['seq'] % 24 for i in ord_l[-number_samples:]], [i['d'] for i in ord_l[-number_samples:]], s=2, color='r', label='ch1')
    # axScatter.scatter(time_l[-number_samples:], b_l[-number_samples:], s=2, color='c', label='ch2')
    # axScatter.scatter(time_l[-number_samples:], c_l[-number_samples:], s=2, color='y', label='ch3')
    # axScatter.scatter(time_l[-number_samples:], d_l[-number_samples:], s=2, color='g', label='ch4')
    plt.ylim(-9000000, 9000000)
    plt.legend()
    axScatter.set_xlabel("Sequence Packet")
    axScatter.set_ylabel("Voltage")
    plt.title("Channels Values")


    # time_plot = plt.subplot(3, 1, 2)
    # time_plot.scatter([i['seq'] for i in ord_l[-number_samples:]], [i['delta'] for i in ord_l[-number_samples:]], s=1, color='r', label='delta')
    # time_plot.set_xlabel("Sequence Packet")
    # time_plot.set_ylabel("Delta to referencial")
    # ax2 = time_plot.twinx()
    # ax2.scatter([i['seq'] for i in ord_l[-number_samples:]], [i['ts'] for i in ord_l[-number_samples:]], s=2, color='g', label='Timestamp')
    # ax2.set_ylabel("Kernel time")
    # plt.title("Timestamp deltas")

    gaps_draw = plt.subplot(3, 1, 3)
    gaps_draw.plot([i[0] for i in gaps[-number_samples:]], [i[1] for i in gaps[-number_samples:]], color='b', marker='.', label='gaps')
    gaps_draw.set_ylim(-0.5, 1.5)

    plt.draw()
    # plt.savefig("res.png")
    plt.show()
Example #17
def calc_snr(plot = False):
    tx = fromfile(open('tx_sym.32fc'), dtype=complex64)
    rx = fromfile(open('rx_sym.32fc'), dtype=complex64)

    if (len(tx) == 0 or len(rx) == 0):
        print('Not valid data')
        print('\tPlease run gnuradio simulation first')
        exit(-1)

    size = min([len(tx), len(rx)]) - 1
    rx = rx[0:size]
    tx = tx[0:size]

    tx_power = sum([abs(tx[i])**2 for i in range(size)])
    rx_power = sum([abs(rx[i])**2 for i in range(size)])
    noise_power = sum([abs(tx[i] - rx[i])**2 for i in range(size)])

    SNR = 1.0*tx_power/noise_power
    SNR_dB = 10*log10(SNR)

    if plot:
        init = 0
        end = 100
        p.subplot(211)
        p.plot(list(real(tx[init:end])), '-o')
        p.plot(list(imag(tx[init:end])), '-o')
        p.subplot(212)
        p.plot(list(real(rx[init:end])), '-o')
        p.plot(list(imag(rx[init:end])), '-o')
        p.show()

    return SNR_dB
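calc_snr uses names such as fromfile, complex64, real, imag and log10, plus the plotting alias p, without showing its imports; one plausible header (an assumption, not the original file) is:

from numpy import fromfile, complex64, real, imag, log10
import pylab as p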
Example #18
def my_predict(model, image, show=False):
    image = image + train_mean
    images = transform_img(image.transpose(1,2,0))

    if show:
        plt.figure()
        plt.imshow(image.transpose(1,2,0).astype(np.uint8))
        plt.show()
        plt.figure()
        for i in range(9):
            plt.subplot(3,3,i+1)
            plt.imshow(images[i].astype(np.uint8))
        plt.show()

    images = images.transpose(0, 3, 1, 2)
    masks = model.predict(images - train_mean)

    masks = masks.transpose(0,2,3,1)[:,:,:,0]
    masks = reverse_gt(masks)
    mask = np.mean(masks, axis=0)

    if show:
        masks = (masks * 255).astype(np.uint8)
        mask = np.mean(masks, axis=0)
        plt.figure()
        for i in range(9):
            plt.subplot(3,3,i+1)
            plt.imshow(masks[i], cmap='Greys_r')
        plt.show()
        plt.figure()
        plt.imshow(mask, cmap='Greys_r')
        plt.show()
    return mask
Example #19
def create_comparison_plot(x, T1_true, P2_true, T2_true, P3_true, T1_ml, P2_ml, T2_ml, P3_ml, value1, value2):
        Chi2_values = []
        errorbar = []
        y = func(x, T1_true, P2_true, T2_true, P3_true, value1, value2)
        y1 = model1(x, T1_true, P2_true, T2_true, P3_true, value1, value2)
        model = func(x, T1_ml, P2_ml, T2_ml, P3_ml, value1, value2)
        for item in model:
            value = 0.05*item
            errorbar.append(value)
        for k in range(0,np.size(y)):
            item = ((y[k]-model[k])/(0.05*y[k]))**2
            Chi2_values.append(item)
            suma1 = np.sum(Chi2_values)
        #Plot the data:
        f = plt.figure()
        gs = gridspec.GridSpec(2,1 ,height_ratios=[10,4])
        ax1 = plt.subplot(gs[0])
        ax1.plot(x,y,'-',label = 'observations')
        ax1.plot(x,y1,'-',label=  'initial guess')
        #ax1.errorbar(x,model,yerr=errorbar)
        ax1.plot(x,model,'-',label = 'model')
        ax1.legend()
        plt.title("Error:"+ str(suma1))
        plt.ylabel('I/F')
        ax2 = plt.subplot(gs[1])
        ax2.plot(x,Chi2_values,'-',label = r'$\chi^2$')
        ax2.legend()
        plt.xlabel('Longitude (SIII)')
        plt.ylabel(r'$\chi^2$')
        plt.show()
        f.savefig('output.png', format='png', dpi=1000)
Example #20
def smooth_demo():

    Smoother = SmoothClass()



    xfile = ArrayClass("DatasetX")
    #print xfile.array
    xn = np.array(xfile.array)

    plt.subplot(211)
    #plt.plot(np.ones(ws))

    windows=['blackman']
    #windows=['flat', 'hanning', 'hamming', 'bartlett', 'blackman']

    # plt.hold(True)  # hold() was removed from Matplotlib; axes retain artists by default


    plt.axis([0,30,0,1.1])

    plt.legend(windows)
    plt.title("The smoothing windows")
    plt.subplot(212)
    #plt.plot(xn)
    plt.plot(xn)
    plt.plot(Smoother.smooth(xn,10,'blackman'))
Example #21
def do_day_hours(day_date_string, hour_numbers, basepath=None, show_plots=False):
    n_plots = len(hour_numbers)
    n_plotrows = 2 if n_plots > 3 else 1  # for now
    if show_plots:
        #        plt.interactive(True)
        plt.figure()
    for (i_plot, hr) in enumerate(hour_numbers):
        # get 2 filespecs to search for datafiles
        spec_strings = pairof_MOGUK_search_filepaths(day_date_string, hr)
        # for now, take **alphabetical last** of files matching target time (==latest forecast date)
        print "spec_strings", spec_strings
        filepair = [sorted(glob.glob(spec))[-1] for spec in spec_strings]
        base_outname = "alluk_%6s_%02d_" % (day_date_string, hr)
        outdir_path = dayhours_outpath + ("day_%s/" % day_date_string[-4:])
        out_filepath = outdir_path + base_outname + "spd_and_dir.csv"

        if show_plots:
            n_subplot = 100 * n_plotrows + 10 * ((n_plots + n_plotrows - 1) // n_plotrows) + i_plot + 1
            print "subplot : ", n_subplot
            plt.subplot(n_subplot)
        produce_csv_files(
            region=[-1000, -1000, 1000, 1000], in_filenames=filepair, out_filepath=out_filepath, show_plot=show_plots
        )
    if show_plots:
        plt.show()
Example #22
    def plot_main_seeds(self, qname, radio=False, checkbox=False, 
                        numerical=False, array=False):
        """ Plot the responses separately for each seed group in main_seeds. """
        
        assert sum([radio, checkbox, numerical, array]) == 1

        for seed in self.main_seeds:
            responses_seed = self.filter_rows_by_seed(seed, self.responses)
            responses_seed_question = self.filter_columns_by_name(qname, responses_seed)

            plt.subplot(int("22" + str(self.main_seeds.index(seed))))
            plt.title("Seed " + seed)

            if radio:
                self.plot_convergence_radio(qname, responses_seed_question)
            elif checkbox:
                self.plot_convergence_checkbox(responses_seed_question)
            elif numerical:
                self.plot_convergence_numerical(responses_seed_question)
            elif array:
                self.plot_array_boxes(qname, responses_seed_question)

        qtext = self.get_qtext_from_qname(qname)
        plt.suptitle(qtext)
        plt.tight_layout()
        plt.show()
Example #23
	def __call__(self,u,w,bx,by,bz,b2,t):
		q = 8

		map = cm.red_blue()
		if self.x is None:
			nx = u.shape[2]
			nz = u.shape[0]
			self.x,self.y = np.meshgrid(range(nx),range(nz))
		x,y = self.x,self.y

		avgu = np.average(u,1)
		avgw = np.average(w,1)
		avgbx = np.average(bx,1)
		avgby = np.average(by,1)
		avgbz = np.average(bz,1)
		avgb2 = np.average(b2,1)
		avgt = np.average(t,1)

		plt.subplot(121)
		plt.imshow(avgt,cmap=map,origin='lower')
		plt.colorbar()
		plt.quiver(x[::q,::q],y[::q,::q],avgu[::q,::q],avgw[::q,::q])
		plt.title('Tracer-Vel')
		plt.axis("tight")

		plt.subplot(122)
		plt.imshow(avgby,cmap=map,origin='lower')
		plt.colorbar()
		plt.quiver(x[::q,::q],y[::q,::q],avgbx[::q,::q],avgbz[::q,::q])
		plt.title('By-Twist')
		plt.axis("tight")
Example #24
def test_single_point():
    fig = plt.figure()
    plt.subplot( 211 )
    plt.plot( [0], [0], 'o' )
    plt.subplot( 212 )
    plt.plot( [1], [1], 'o' )
    fig.savefig( 'single_point' )
Example #25
def ReleaseMemoryPlot2(mincut = 0.9, maxcut = 1, N = 100):
    step = (maxcut - mincut)/N
    cuts = [mincut + step*i for i in range(0, N+1)]
    
    released_memory = []
    good_memory = []
    part_of_good_memory = []
    
    all_memory = signal_test2.get_data(['DiskSize']).values[:,0].sum() + bck_test2.get_data(['DiskSize']).values[:,0].sum()
    memory_can_be_free = signal_test2.get_data(['DiskSize']).values[:,0].sum()
    
    for i in cuts:
        rm, gm, pm = ReleaseMemory2(cut = i)
        released_memory.append(rm)
        good_memory.append(gm)
        part_of_good_memory.append(pm)
    
    print('all_memory = ', all_memory)
    print('memory_can_be_free = ', memory_can_be_free)
    
    plt.subplot(1,1,1)
    plt.plot(cuts, released_memory, 'b', label = 'released memory')
    plt.plot(cuts, good_memory, 'r', label = 'good memory')
    plt.legend(loc = 'best')
    plt.show()
    
    plt.subplot(1,1,1)
    plt.plot(cuts, part_of_good_memory, 'r', label = 'part of good memory')
    plt.legend(loc = 'best')
    plt.show()
Example #26
def visualize_singular_values(args):
    param_values = load_parameter_values(args.load_path)
    for d in range(args.layers):
        if args.rnn_type == 'lstm':
            ws = param_values["/recurrentstack/lstm_" + str(d) + ".W_state"]
            w_rec = ws[:, 3 * args.state_dim:]
        elif args.rnn_type == 'simple':
            w_rec = param_values["/recurrentstack/simplerecurrent_" + str(d) +
                                 ".W_state"]
        else:
            raise NotImplementedError
        U, s, V = np.linalg.svd(w_rec, full_matrices=True)
        plt.subplot(2, 1, 1)
        plt.plot(np.arange(s.shape[0]), s, label='Layer_' + str(d))
        plt.grid(True)
        plt.legend(loc='upper right')
        plt.title("Singular_values_of_recurrent_weights")
        plt.subplot(2, 1, 2)
        plt.plot(np.arange(s.shape[0]), np.log(s + 1E-15),
                 label='Layer_' + str(d))
        plt.grid(True)
        plt.title("Log_singular_values_of_recurrent_weights")
    plt.tight_layout()

    plt.savefig(args.save_path + "/visualize_singular_values.png")
    logger.info("Figure \"visualize_singular_values"
                ".png\" saved at directory: " + args.save_path)
Example #27
def make_intergenerational_figure(data, lowerbound, upperbound, rows, title):
    plt.figure(figsize=(10,10))
    plt.suptitle(title,fontsize=20)
    for index in range(4):
        plt.subplot(2,2,index+1)    
        #simulation distribution
        plt.hist(accepted[:,rows[index]], density=True, bins = range(0,100,5), color = col)
        #simulation values
        value = np.mean(accepted[:,rows[index]])
        std = 2*np.std(accepted[:,rows[index]])
        plt.errorbar((value,), (red_marker_location-0.02), xerr=((std,),(std,)),
                     color=col, fmt='o', linewidth=2, capsize=5, mec = col)
        #survey values
        value = data[index]
        lb = lowerbound[index]
        ub = upperbound[index]
        plt.errorbar((value,), (red_marker_location,), xerr=((value-lb,),(ub-value,)),
                     color='r', fmt='o', linewidth=2, capsize=5, mec = 'r')
        #labeling    
        plt.ylim(0,ylimit)
        plt.xlim(0,100)
    #make subplots pretty
    plt.subplot(2,2,1)
    plt.title("Males")
    plt.ylabel("'05\nFrequency")
    plt.subplot(2,2,2)
    plt.title("Females")
    plt.subplot(2,2,3)
    plt.ylabel("'08\nFrequency")
    plt.xlabel("Percent Responding Affirmatively")
    plt.subplot(2,2,4)
    plt.xlabel("Percent Responding Affirmatively")
Example #28
    def subplot(self,names,title=None,style=None):
        assert isinstance(names,list)

        fig = plt.figure()
        if title is None:
            if isinstance(names,str):
                title = names
            else:
                assert isinstance(names,list)
                if len(names) == 1:
                    title = names[0]
                else:
                    title = str(names)
        fig.canvas.manager.set_window_title(str(title))

        plt.clf()
        n = len(names)
        if style is None:
            style = [None]*n
        for k,name in enumerate(names):
            plt.subplot(n,1,k+1)
            if k==0:
                self._plot(name,title,style=style[k])
            else:
                self._plot(name,None,style=style[k])
Example #29
def plotMultiGameTaskResults(directory, game, numTasks = 2):
    filename = directory + game
    fullShareResultsFilename = filename + "_fullShare.csv"
    layerShareResultsFilename = filename + "_layerShare.csv"
    repShareResultsFilename = filename + "_repShare.csv"
 
    games = game.split(",")
    if len(games) != numTasks:
        print "The number of games is not equal to the number of tasks - not plotting"
        return
 
    fullResults = getResultsFromFile(fullShareResultsFilename, numTasks)
    layerResults = getResultsFromFile(layerShareResultsFilename, numTasks)
    repResults = getResultsFromFile(repShareResultsFilename, numTasks)
 
   
    # figure = plt.figure()
    # subplot = figure.add_subplot(111)
    # plots = []
    # for i in xrange(numTasks):
    #     plots.append(subplot.plot(fullResults[0][0:len(fullResults[1][i])], fullResults[1][i], label="FullShare: " + str(i)))
 
    # figure.suptitle(title)
   
    for i in range(len(games)):
        plt.figure(i + 1)
        plt.subplot(111)
        plt.plot(fullResults[0][0:len(fullResults[1][i])], fullResults[1][i], label="Full Share")
        plt.plot(layerResults[0][0:len(layerResults[1][i])], layerResults[1][i], label="Layer Share")
        plt.plot(repResults[0][0:len(repResults[1][i])], repResults[1][i], label="Rep Share")
        plt.xlabel('epochs')
        plt.ylabel('Average Reward')
        plt.title(games[i])
        L = plt.legend()
        L.draggable(state=True)
Example #30
def plot_energies(energies,
                  bulk_window=None, coda_window=None, downsample_to=None,
                  xlim_lin=None, xlim_log=None,
                  figsize=None, **kwargs):
    gs = gridspec.GridSpec(2 * len(energies), 2)
    gs.update(wspace=0.05)
    fig = plt.figure(figsize=figsize)
    sax1 = sax3 = None
    for i, tr in enumerate(energies):
        pair = get_pair(tr)
        otime = tr.stats.origintime
        if downsample_to is None:
            d = 1
        else:
            d = tr.stats.sampling_rate // downsample_to
        ts = np.arange(len(tr)) * tr.stats.delta
        ts = ts - (otime - tr.stats.starttime)
        c = 'k'
        ax2 = plt.subplot(gs[2 * i + 1, 0], sharex=sax1, sharey=sax1)
        ax1 = plt.subplot(gs[2 * i, 0], sharex=ax2)
        ax3 = plt.subplot(gs[2 * i:2 * i + 2, 1], sharex=sax3, sharey=sax3)
        ax1.annotate('%s' % pair[1], (1, 0.5), (-10, 0), 'axes fraction',
                     'offset points', size='small', ha='right', va='center')
        ax3.annotate('%s' % pair[0], (0, 1), (10, -5), 'axes fraction',
                     'offset points', size='small', ha='left', va='top')
        ax1.plot(ts[::d], tr.data[::d], color=c)
        ax2.semilogy(ts[::d], tr.data[::d], color=c)
        ax3.loglog(ts[::d], tr.data[::d], color=c)
        for ax in (ax1, ax2, ax3):
            plt.setp(ax.get_xticklabels(), visible=False)
            ax.set_yticklabels([])
            if 'ponset' in tr.stats:
                tponset = tr.stats.ponset - otime
                ax.axvline(tponset, color='green', alpha=0.5)
            if 'sonset' in tr.stats:
                tsonset = tr.stats.sonset - otime
                ax.axvline(tsonset, color='b', alpha=0.5)
        for ax in (ax2, ax3):
            if bulk_window and coda_window:
                c = ('b', 'k')
                wins = (bulk_window[pair], coda_window[pair])
                for i, win in enumerate(wins):
                    ax.axvspan(win[0] - otime, win[1] - otime,
                               0.05, 0.08, color=c[i], alpha=0.5)

        if sax1 is None:
            sax1 = ax2
            sax3 = ax3
    if xlim_lin:
        ax1.set_xlim(xlim_lin)
    if xlim_log:
        ax3.set_xlim(xlim_log)
    loglocator = mpl.ticker.LogLocator(base=100)
    ax2.yaxis.set_major_locator(loglocator)
    ax3.yaxis.set_major_locator(loglocator)
    ax2.yaxis.set_minor_locator(mpl.ticker.NullLocator())
    ax3.yaxis.set_minor_locator(mpl.ticker.NullLocator())
    plt.setp(ax2.get_xticklabels(), visible=True)
    plt.setp(ax3.get_xticklabels(), visible=True)
    _savefig(fig, **kwargs)
Example #31
    def infraCurve(self):

        if not hasattr(self, "current_lin_Ro"):
            if self.prefs.debug:
                print(
                    printMessage("warning"),
                    " Run a search for overpressure or period first before plotting a point!"
                )
            errorMessage(
                "No points to plot!",
                1,
                detail=
                'Please use one of the searches to find Ro for both weak-shock and linear!'
            )
            return None

        if not self.current_lin_Ro is None and not self.current_ws_Ro is None and not self.current_height is None:
            self.bam.infra_curve.append(
                [self.current_lin_Ro, self.current_ws_Ro, self.current_height])

        if len(self.bam.infra_curve) == 0:
            errorMessage("No points to plot!",
                         1,
                         detail='Please use one of the searches to find Ro!')
            return None

        ax1 = plt.subplot(2, 1, 1)
        E_lin = []
        E_ws = []
        heights = []

        for point in self.bam.infra_curve:

            h = point[2]
            heights.append(h)
            E_lin.append(Efunction(point[0], h))
            E_ws.append(Efunction(point[1], h))

        # Plot one point per curve entry (the original reused only the last h)
        heights_km = [h / 1000 for h in heights]
        ax1.scatter(heights_km, E_lin, label='Linear')
        ax1.scatter(heights_km, E_ws, label="Weak Shock")

        ax1.set_xlabel("Height [km]")
        ax1.set_ylabel("Energy per Unit Length [J/m]")

        ax2 = plt.subplot(2, 1, 2, sharex=ax1)

        light_curve = readLightCurve(self.bam.setup.light_curve_file)

        light_curve_list = processLightCurve(light_curve)

        for L in light_curve_list:
            ax2.scatter(L.h, L.M, label=L.station)
            # light_curve_curve = pg.ScatterPlotItem(x=L.M, y=L.t)
            # self.light_curve_canvas.addItem(light_curve_curve)

        ax2.set_xlabel("Height [km]")
        ax2.set_ylabel("Absolute Magnitude")
        plt.gca().invert_yaxis()
        plt.legend()

        # ax3 = plt.subplot(3, 1, 3, sharex=ax2)
        # v = self.bam.setup.trajectory.v
        # ax3.scatter(np.array(h)/1000, np.array(E_lin)/v, label='Linear')
        # ax3.scatter(np.array(h)/1000, np.array(E_ws)/v, label="Weak Shock")
        # for L in light_curve_list:
        #     ax3.scatter(L.h, 10**(-0.4*np.array(L.M)), label=L.station)

        # ax3.set_xlabel("Height [km]")
        # ax3.set_ylabel("?? Max Intensity ??")

        # plt.legend()
        plt.show()
Example #32
               return_sequences=True,
               stateful=True))
model.add(LSTM(50,
               return_sequences=False,
               stateful=True))
model.add(Dense(1))
model.compile(loss='mse', optimizer='rmsprop')

print('Training')
for i in range(epochs):
    print('Epoch', i, '/', epochs)
    model.fit(cos,
              expected_output,
              batch_size=batch_size,
              verbose=1,
              nb_epoch=1,
              shuffle=False)
    model.reset_states()

print('Predicting')
predicted_output = model.predict(cos, batch_size=batch_size)

print('Plotting Results')
plt.subplot(2, 1, 1)
plt.plot(expected_output)
plt.title('Expected')
plt.subplot(2, 1, 2)
plt.plot(predicted_output)
plt.title('Predicted')
plt.show()
Example #33
import cv2 as cv
import matplotlib.pyplot as plt
import numpy as np

# Read the image
source = cv.imread('demo.png', cv.IMREAD_GRAYSCALE)

# Set up the convolution kernel
kernel = np.ones((5, 5), np.uint8)

# Image erosion
erode_img = cv.erode(source, kernel)

# Image dilation
dilate_result = cv.dilate(source, kernel)

# Display the results
titles = ['Source Img', 'Erode Img', 'Dilate Img']
images = [source, erode_img, dilate_result]

# Plot with matplotlib
for i in range(3):
    plt.subplot(1, 3, i + 1), plt.imshow(images[i], 'gray')
    plt.title(titles[i])
    plt.xticks([]), plt.yticks([])

plt.show()
Example #34
def plot_spectra(freqs,
                 fluxes,
                 errs,
                 models,
                 names,
                 params,
                 param_errs,
                 rcs,
                 BICs,
                 colours,
                 labels,
                 figname,
                 annotate=True,
                 model_selection='better'):
    """Plot a figure of the radio spectra of an individual source, according to the input data and models.

    Arguments:
    ----------
    freqs : list
        A list of frequencies in MHz.
    fluxes : list
        A list of fluxes in Jy.
    errs : list
        A list of flux uncertainties in Jy.
    models : list
        A list of functions corresponding to models of the radio spectrum.
    names : 2D list
        A list of fitted parameter names corresponding to each model above.
    params : 2D list
        A list of fitted parameter values corresponding to each model above.
    param_errs : 2D list
        A list of uncertainties on the fitted parameters corresponding to each model above.
    rcs : list
        A list of reduced chi squared values corresponding to each model above.
    BICs : list
        A list of Bayesian Information Criteria (BIC) values corresponding to each model above.
    colours : list
        A list of colours corresponding to each model above.
    labels : list
        A list of labels corresponding to each model above.
    figname : string
        The filename to give the figure when writing to file.

    Keyword arguments:
    ------------------
    annotate : bool
        Annotate fit info onto figure.
    model_selection : string
        How to select models for plotting, based on the BIC values. Options are:

            'best' - only plot the best model.

            'all' - plot all models.

            'better' - plot each model better than the previous, chronologically."""

    #create SEDs directory if doesn't already exist
    if not os.path.exists('SEDs'):
        os.mkdir('SEDs')

    fig = plt.figure()
    ax = plt.subplot()

    #plot frequency axis 20% beyond range of values
    xlin = np.linspace(min(freqs) * 0.8, max(freqs) * 1.2, num=5000)
    plt.ylabel(r'Flux Density $S$ (mJy)')
    plt.xlabel(r'Frequency $\nu$ (GHz)')
    plt.xscale('log')
    plt.yscale('log')

    #adjust the tick values and add grid lines at minor tick locations
    subs = [1.0, 2.0, 5.0]
    ax.xaxis.set_major_locator(ticker.LogLocator(subs=subs))
    ax.yaxis.set_major_locator(ticker.LogLocator(subs=subs))
    ax.xaxis.set_minor_formatter(ticker.NullFormatter())
    ax.yaxis.set_minor_formatter(ticker.NullFormatter())
    ax.xaxis.set_major_formatter(ticker.FuncFormatter(ticks_format_freq))
    ax.yaxis.set_major_formatter(ticker.FuncFormatter(ticks_format_flux))
    ax.grid(b=True, which='minor', color='w', linewidth=0.5)

    #plot flux measurements
    plt.errorbar(freqs,
                 fluxes,
                 yerr=errs,
                 linestyle='none',
                 marker='.',
                 c='r',
                 zorder=15)

    best_bic = 0
    dBIC = 3
    offset = 0
    plotted_models = 0

    #plot each model
    for i in range(len(models)):
        ylin = models[i](xlin, *params[i])
        txt = "{0}:\n   {1}".format(labels[i],
                                    r'$\chi^2_{\rm red} = %.1f$' % rcs[i])

        #compare BIC values
        bic = BICs[i]
        if i > 0:
            dBIC = best_bic - bic
            if model_selection != 'best':
                txt += ', {0}'.format(r'$\Delta{\rm BIC} = %.1f$' % (dBIC))
        if dBIC >= 3:
            best_bic = bic

        #plot model if selected according to input
        if model_selection == 'all' or (model_selection == 'better' and dBIC >=
                                        3) or (model_selection == 'best'
                                               and BICs[i] == min(BICs)):

            plotted_models += 1
            plt.plot(xlin,
                     ylin,
                     c=colours[i],
                     linestyle='--',
                     zorder=i + 1,
                     label=labels[i])
            plt.legend(scatterpoints=1,
                       fancybox=True,
                       frameon=True,
                       shadow=True)
            txt += '\n'

            #add each fitted parameter to string (in LaTeX format)
            for j, param in enumerate(names[i]):
                units = ''
                tokens = param.split('_')
                if len(tokens[0]) > 1:
                    tokens[0] = "\\" + tokens[0]
                if len(tokens) > 1:
                    param = r'%s_{\rm %s}' % (tokens[0], tokens[1])
                else:
                    param = tokens[0]
                val = params[i][j]
                err = param_errs[i][j]

                if param.startswith('S'):
                    units = 'Jy'
                    if val < 0.01:
                        val = val * 1e3
                        err = err * 1e3
                        units = 'mJy'
                elif 'nu' in param:
                    units = 'MHz'
                    if val > 100:
                        val = val / 1e3
                        err = err / 1e3
                        units = 'GHz'

                val = sig_figs(val)
                err = sig_figs(err)

                txt += '   ' + r'${0}$ = {1} $\pm$ {2} {3}'.format(
                    param, val, err, units) + '\n'

            #annotate all fit info if it will fit on figure
            if annotate and plotted_models <= 3:
                plt.text(offset,
                         0,
                         txt,
                         horizontalalignment='left',
                         verticalalignment='bottom',
                         transform=ax.transAxes)
                offset += 0.33

    #write figure and close
    plt.savefig('SEDs/{0}'.format(figname))
    plt.close()
Example #35
#%%
'''test data analysis'''
Atest_ = Atest_.toarray().reshape(-1, PARAMS['keywords'], PARAMS['keywords'])[:, di[0], di[1]]

test_words = []
for i in range(len(Atest_)):
    test_words.append([keywords[w] for w in np.where(Atest_[i, :] == 1)[0]])
    
# test words save
with open("./result/test_words.txt", "w") as f:
    for w in test_words:
        f.write(' '.join(w) + '\n')

plt.figure(figsize=(20, 20))
for j in range(10):
    plt.subplot(5, 2, j+1)
    count = {x:0 for x in keywords}
    temp = sum(test_words[100*j:100*(j+1)], [])
    for i in range(len(temp)):
        count[temp[i]] = count.get(temp[i]) + 1
    plt.bar(range(len(count)), list(count.values()), align='center')
    plt.xticks(size = 20)
    plt.yticks(size = 20)
    plt.title('Month {}'.format(j+1), fontsize=30)
    # plt.xticks(range(len(count)), list(count.keys()))
    plt.tight_layout() 
plt.savefig('./result/test_freq.png', 
            dpi=200, bbox_inches="tight", pad_inches=0.1)
plt.show()
#%%
# reconstruction
Example #36
        self.plot_psf(U)


if __name__ == '__main__':

    # handy object for plotting the PSF
    psfplot = PSFPlot()

    # beta (diffraction-limited), N_beta = cpsf.czern.nk
    beta = np.zeros(psfplot.cpsf.czern.nk, dtype=complex)  # np.complex was removed from NumPy
    beta[0] = 1.0
    beta[5] = 1j*0.3
    beta[6] = -1j*0.3

    # plot the results
    nn, mm = 2, math.ceil(psfplot.fspace.size / 2)
    p.figure(1)

    for fi, f in enumerate(psfplot.fspace):
        ax = p.subplot(nn, mm, fi + 1)

        # plot the psf
        psfplot.plot_beta_f(beta, fi)
        p.colorbar()

        # defocus in rad
        p.title('d={:.1f}'.format(enz.get_defocus(f)))

    p.tight_layout()
    p.show()
Example #37
    # plt.figure()
    # librosa.display.waveplot(y=samples, sr=sampling_rate)
    # plt.xlabel("time (seconds) -->")
    # plt.ylabel("amplitude")
    # plt.show()

    D = librosa.stft(y)
    D_harmonic, D_percussive = librosa.decompose.hpss(D)

    # Pre-compute a global reference power from the input spectrum
    rp = np.max(np.abs(D))

    plt.figure(figsize=(12, 8))

    plt.subplot(3, 1, 1)
    librosa.display.specshow(librosa.amplitude_to_db(np.abs(D), ref=rp), y_axis='log')
    plt.colorbar()
    plt.title('Full spectrogram')

    plt.subplot(3, 1, 2)
    librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_harmonic), ref=rp), y_axis='log')
    plt.colorbar()
    plt.title('Harmonic spectrogram')

    plt.subplot(3, 1, 3)
    librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_percussive), ref=rp), y_axis='log',
                             x_axis='time')
    plt.colorbar()
    plt.title('Percussive spectrogram')
    plt.tight_layout()
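The snippet assumes an audio signal y already loaded and the usual librosa imports; a minimal setup sketch (the file name is illustrative only) would be:

import numpy as np
import matplotlib.pyplot as plt
import librosa
import librosa.display

y, sampling_rate = librosa.load('audio.wav')  # hypothetical input file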
Example #38
    def __repr__(self):
        # TODO: Currently plotting the wrong number of facets.
        if self.facet_type=="grid":
            fig, axs = plt.subplots(self.n_high, self.n_wide,
                    sharex=True, sharey=True)
            plt.subplots_adjust(wspace=.05, hspace=.05)
        elif self.facet_type=="wrap":
            subplots_available = self.n_wide * self.n_high
            extra_subplots = subplots_available - self.n_dim_x

            fig, axs = plt.subplots(self.n_high, self.n_wide)
            for extra_plot in axs.flatten()[-extra_subplots:]:
                extra_plot.axis('off')

            # TODO: This isn't working
            plots = [None for i in range(self.n_dim_x)]
            for i in range(self.n_dim_x):
                idx = (i % self.n_high) * self.n_wide + (i % self.n_wide)
                plots[idx] = (i % self.n_wide, i % self.n_high)

            plots = [plot for plot in plots if plot is not None]
            plots = sorted(plots, key=lambda x: x[1] + x[0] * self.n_high + 1)

        else:
            fig, axs = plt.subplots(self.n_high, self.n_wide)

        plt.subplot(self.n_wide, self.n_high, 1)

        # Faceting just means doing an additional groupby. The
        # dimensions of the plot remain the same
        if self.facets:
            cntr = 0
            if len(self.facets)==2:
                for facets, frame in self.data.groupby(self.facets):
                    pos = self.facet_pairs.index(facets) + 1
                    plt.subplot(self.n_wide, self.n_high, pos)
                    for layer in self._get_layers(frame):
                        for geom in self.geoms:
                            callbacks = geom.plot_layer(layer)
                # This needs to enumerate all possibilities
                for pos, facets in enumerate(self.facet_pairs):
                    pos += 1
                    if pos <= self.n_high:
                        plt.subplot(self.n_wide, self.n_high, pos)
                        plt.table(cellText=[[facets[1]]], loc='top',
                                cellLoc='center', cellColours=[['lightgrey']])
                    if (pos % self.n_high)==0:
                        plt.subplot(self.n_wide, self.n_high, pos)
                        x = max(plt.xticks()[0])
                        y = max(plt.yticks()[0])
                        ax = axs[pos % self.n_high][pos % self.n_wide]
                        plt.text(x*1.025, y/2., facets[0],
                            bbox=dict(facecolor='lightgrey', color='black'),
                            fontdict=dict(rotation=-90, verticalalignment="center")
                        )
                    plt.subplot(self.n_wide, self.n_high, pos)

                # Handle the different scale types here (free|free_y|free_x|None) and 
                # also make sure that only the left column gets y scales and the bottom
                # row gets x scales
                scale_facet(self.n_wide, self.n_high, self.facet_pairs, self.facet_scales)

            else:
                for facet, frame in self.data.groupby(self.facets):
                    for layer in self._get_layers(frame):
                        for geom in self.geoms:
                            if self.facet_type=="wrap":
                                if cntr+1 > len(plots):
                                    continue
                                pos = plots[cntr]
                                if pos is None:
                                    continue
                                y_i, x_i = pos
                                pos = x_i + y_i * self.n_high + 1
                                plt.subplot(self.n_wide, self.n_high, pos)
                            else:
                                plt.subplot(self.n_wide, self.n_high, cntr)
                                # TODO: this needs some work
                                if (cntr % self.n_high)==-1:
                                    plt.tick_params(axis='y', which='both',
                                            bottom='off', top='off',
                                            labelbottom='off')
                            callbacks = geom.plot_layer(layer)
                            if callbacks:
                                for callback in callbacks:
                                    fn = getattr(axs[cntr], callback['function'])
                                    fn(*callback['args'])
                    #TODO: selective titles
                    plt.title(facet)
                    cntr += 1
        else:
            for layer in self._get_layers(self.data):
                for geom in self.geoms:
                    plt.subplot(1, 1, 1)
                    callbacks = geom.plot_layer(layer)
                    if callbacks:
                        for callback in callbacks:
                            fn = getattr(axs, callback['function'])
                            fn(*callback['args'])

        # Handling the details of the chart here; probably be a better
        # way to do this...
        if self.title:
            plt.title(self.title)
        if self.xlab:
            if self.facet_type=="grid":
                fig.text(0.5, 0.025, self.xlab)
            else:
                plt.xlabel(self.xlab)
        if self.ylab:
            if self.facet_type=="grid":
                fig.text(0.025, 0.5, self.ylab, rotation='vertical')
            else:
                plt.ylabel(self.ylab)
        if self.xmajor_locator:
            plt.gca().xaxis.set_major_locator(self.xmajor_locator)
        if self.xtick_formatter:
            plt.gca().xaxis.set_major_formatter(self.xtick_formatter)
            fig.autofmt_xdate()
        if self.xbreaks: # xbreaks is a list manually provided
            plt.gca().xaxis.set_ticks(self.xbreaks)
        if self.xtick_labels:
            plt.gca().xaxis.set_ticklabels(self.xtick_labels)
        if self.ytick_formatter:
            plt.gca().yaxis.set_major_formatter(self.ytick_formatter)
        if self.xlimits:
            plt.xlim(self.xlimits)
        if self.ylimits:
            plt.ylim(self.ylimits)
        if self.scale_y_reverse:
            plt.gca().invert_yaxis()
        if self.scale_x_reverse:
            plt.gca().invert_xaxis()

        # TODO: Having some issues here with things that shouldn't have a legend
        # or at least shouldn't get shrunk to accomodate one. Need some sort of
        # test in place to prevent this OR prevent legend getting set to True.
        if self.legend:
            if self.facets:
                if 1==2:
                    ax = axs[0][self.n_wide]
                    box = ax.get_position()
                    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
                    cntr = 0
                    for ltype, legend in self.legend.items():
                        lname = self.aesthetics.get(ltype, ltype)
                        ax.add_artist(draw_legend(ax, legend, ltype, lname, cntr))
                        cntr += 1
            else:
                box = axs.get_position()
                axs.set_position([box.x0, box.y0, box.width * 0.8, box.height])
                cntr = 0
                for ltype, legend in self.legend.items():
                    if legend:
                        lname = self.aesthetics.get(ltype, ltype)
                        axs.add_artist(draw_legend(axs, legend, ltype, lname, cntr))
                        cntr += 1

        # TODO: We can probably get more sugary with this
        return "<ggplot: (%d)>" % self.__hash__()
Example #39
def draw_mix_u(d, navi_u, tele_u, mix_u, gain):
    fig = plt.figure(figsize=(10, 3))
    gs = gridspec.GridSpec(1, 2, width_ratios=[1, 2])
    ax1 = plt.subplot(gs[0])
    ds = 0.4
    epsilon = 0.1
    dd = [-ds + i * 0.005 for i in range(200)]
    gg = [rho(ddi) / (rho(ddi) + rho(epsilon - ddi)) for ddi in dd]
    ax1.plot(dd,
             gg,
             color='k',
             linestyle='-',
             linewidth=3,
             label=r'$\kappa(\cdot)$')
    ax1.set_xlabel(r'$d_t-d_s(m)$', fontsize=20)
    ax1.legend(loc=(.55, .62),
               labelspacing=0.7,
               numpoints=3,
               handlelength=2.5,
               ncol=1,
               prop={'size': 15})
    ax1.set_xlim(dd[0], dd[-1])
    ax1.grid()
    ax2 = plt.subplot(gs[1])
    ax2.plot(d,
             navi_u,
             color='r',
             linestyle='-',
             linewidth=3,
             marker='o',
             mfc='r',
             fillstyle='full',
             markersize=6,
             label=r'$u_r$')
    ax2.plot(d,
             tele_u,
             color='g',
             linestyle='-',
             linewidth=3,
             marker='d',
             mfc='g',
             fillstyle='full',
             markersize=6,
             label=r'$u_h$')
    ax2.plot(d,
             mix_u,
             color='b',
             linestyle='-',
             linewidth=3,
             marker='>',
             mfc='b',
             fillstyle='full',
             markersize=6,
             label=r'$u$')
    ax2.plot(d,
             gain,
             color='k',
             linestyle='-',
             linewidth=3,
             marker='*',
             mfc='k',
             fillstyle='full',
             markersize=6,
             label=r'$\kappa$')
    ax2.set_xlabel(r'$d_t(m)$', fontsize=20)
    ax2.legend(loc=(.6, .62),
               numpoints=3,
               handlelength=2.0,
               ncol=2,
               prop={'size': 15})
    ax2.set_xlim(0, d[-1] + 0.01)
    ax2.grid()
    fig.tight_layout()
    plt.savefig('mix_u.pdf', bbox_inches='tight')
    return fig
Example #40
ACF, J2 = [], []
energy = []
for step in range(nsteps):
    x = MATRIX(1, 1)
    x.set(0, 0, s1_energy[step])
    energy.append(x)
Tij, ACFij, uACFij, Wij, Jij, J2ij = infsp.recipe1(energy, params2)
ACF.append(ACFij)
J2.append(np.array(J2ij))

plt.figure(num=None,
           figsize=(3.21, 2.41),
           dpi=300,
           edgecolor='black',
           frameon=True)
plt.subplot(1, 1, 1)
plt.title('S1 Energy', fontsize=10)  # traj'+str(subtraj)+'', fontsize=10)
plt.xlabel('Times, fs', fontsize=10)
plt.ylabel('Energy, eV', fontsize=10)
plt.plot(range(nsteps), s1_energy, label="", linewidth=1)
plt.tight_layout()
#plt.ylim(0,3)
#plt.legend(fontsize=8, ncol=3, loc="lower left")
plt.savefig("S1.png", dpi=300)

plt.figure(num=None,
           figsize=(3.21, 2.41),
           dpi=300,
           edgecolor='black',
           frameon=True)
plt.subplot(1, 1, 1)
Example #41
signal = macd.ewm(span=45).mean()
macdhist = macd - signal
df = df.assign(ema130=ema130, ema60=ema60, macd=macd, signal=signal, macdhist=macdhist).dropna()

df['number'] = df.index.map(mdates.date2num)
ohlc = df[['number','open','high','low','close']]

ndays_high = df.high.rolling(window=14, min_periods=1).max()
ndays_low = df.low.rolling(window=14, min_periods=1).min()

fast_k = (df.close - ndays_low) / (ndays_high - ndays_low) * 100
slow_d = fast_k.rolling(window=3).mean()
df = df.assign(fast_k = fast_k, slow_d = slow_d).dropna()

plt.figure(figsize=(9, 9))
p1 = plt.subplot(3,1,1)
plt.title('Triple Screen Trading (NCSOFT)')
plt.grid(True)
candlestick_ohlc(p1,ohlc.values, width=.6, colorup='red', colordown='blue')
p1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
plt.plot(df.number, df['ema130'], color='c', label='EMA130')
for i in range(1,len(df.close)):
    if df.ema130.values[i-1] < df.ema130.values[i] and \
        df.slow_d.values[i-1] >= 20 and df.slow_d.values[i] < 20:
        plt.plot(df.number.values[i], 250000, 'r^')
    elif df.ema130.values[i-1] > df.ema130.values[i] and \
        df.slow_d.values[i-1] <= 80 and df.slow_d.values[i] > 80:
        plt.plot(df.number.values[i],250000,'bv')
plt.legend(loc='best')

p2=plt.subplot(3,1,2)
Example #42
plt.figure()
plt.imshow(image, cmap=plt.cm.binary)
plt.colorbar()
plt.grid(False)
plt.show()

# %%
"""
Display the first 25 images from the *training set* and display the class name below each image. Verify that the data is in the correct format and we're ready to build and train the network.
"""

# %%
plt.figure(figsize=(10, 10))
for i, (image, label) in enumerate(test_dataset.take(25)):
    image = image.numpy().reshape((28, 28))
    plt.subplot(5, 5, i + 1)
    plt.xticks([])
    plt.yticks([])
    plt.grid(False)
    plt.imshow(image, cmap=plt.cm.binary)
    plt.xlabel(class_names[label])
plt.show()


model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28, 1)),
    tf.keras.layers.Dense(128, activation=tf.nn.relu),
    tf.keras.layers.Dense(10, activation=tf.nn.softmax)
])
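
# A minimal sketch of the step that would typically follow (an assumption, not part of
# the original snippet): compile the model and train it. The names train_dataset and
# BATCH_SIZE are placeholders for the input pipeline built earlier in the source notebook.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.fit(train_dataset.batch(BATCH_SIZE), epochs=5)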

"""
Ejemplo n.º 43
0
 print('Epoch: %d' % (epoch + 1),
       'DiscriminatorA Loss= %f, DiscriminatorB Loss= %f, Generator Loss= %f, Avg Loss= %f'
       % (DA_loss_curr, DB_loss_curr, G_loss_curr, avg_loss))
 losses.append((DA_loss_curr, DB_loss_curr, G_loss_curr, avg_loss))
 # Plot the transformed images every 100 epochs
 if (epoch + 1) % 100 == 0:
     samples_A = sess.run(X_BA, feed_dict={X_B: XB})
     samples_B = sess.run(X_AB, feed_dict={X_A: XA})
     # Test images from domain A
     f, axes = plt.subplots(figsize=(7, 7),
                            nrows=1,
                            ncols=2,
                            sharey=True,
                            sharex=True)
     for ii in range(2):
         plt.subplot(1, 2, ii + 1)
         plt.suptitle('Domain A')
         plt.imshow(XA[ii].reshape(45, 40), 'Greys_r')
     # Result of G_AB(X_A)
     f, axes = plt.subplots(figsize=(7, 7),
                            nrows=1,
                            ncols=2,
                            sharey=True,
                            sharex=True)
     for ii in range(2):
         plt.subplot(1, 2, ii + 1)
         plt.suptitle('Result of G_AB')
         plt.imshow(samples_B[ii].reshape(40, 45), 'Greys_r')
     # Test images from domain B
     f, axes = plt.subplots(figsize=(7, 7),
                            nrows=1,
Ejemplo n.º 44
0
def DrawFig( figureFile, distance, leftIden, rigthIden, aveIden, nr, aa, bb, test ) : 

    fig = plt.figure( num=None, figsize=(16, 18), facecolor='w', edgecolor='k' )
    plt.subplot(321)

    """
    from matplotlib.colors import LogNorm
    plt.hist2d(test[:,4], test[:,5], bins=50, norm=LogNorm())
    plt.plot(test[:,0], test[:,1], 'co')
    """
    plt.title('Distance distribution', fontsize=16)
    plt.plot(distance[:,0] , 100 * distance[:,1]/np.sum(distance[:,1])  , 'ro-' )
    plt.xlabel('The breakpoints of variants span on assembled sequence (%)', fontsize=16)
    plt.ylabel('% of Number', fontsize=16)

    plt.subplot(322)
    plt.title('Left Side', fontsize=16)
    plt.plot(leftIden[:,0] , leftIden[:,2]/np.sum(leftIden[:,1])  , 'go-' )
    plt.axis([0,100,0.0,1.0])
    plt.xlabel('Left Side Identity of variants (<=%)', fontsize=16)
    plt.ylabel('% of Accumulate', fontsize=16)

    plt.subplot(323)
    plt.title('Right Side', fontsize=16)
    plt.plot(rigthIden[:,0], rigthIden[:,2]/np.sum(rigthIden[:,1]), 'bo-' )
    plt.axis([0,100,0.0,1.0])
    plt.xlabel('Right Side Identity of variants (<=%)', fontsize=16)
    plt.ylabel('% of Accumulate', fontsize=16)

    plt.subplot(324)
    plt.title('Average', fontsize=16)
    plt.plot(aveIden[:,0]  , aveIden[:,2]/np.sum(aveIden[:,1])    , 'co-' )
    plt.axis([0,100,0.0,1.0])
    plt.xlabel('Average Identity of variants (<=%)', fontsize=16)
    plt.ylabel('% of Accumulate', fontsize=16)

    plt.subplot(325)
    plt.title('N Ratio', fontsize=16)
    plt.plot(nr[:,0], nr[:,2]/np.sum(nr[:,1]), 'yo-' )
    plt.axis([0,5,0.0,1.0])
    plt.xlabel('N Ratio of variants\' regions (>=%)', fontsize=16)
    plt.ylabel('% of Accumulate', fontsize=16)

    plt.subplot(6,2,10)
    plt.plot(aa[:,0], aa[:,2]/np.sum(aa[:,1]), 'mo-' )
    plt.axis([0,100,0.0,1.0])
    plt.xlabel('Perfect Depth(<=)', fontsize=12)
    plt.ylabel('% of Accumulate', fontsize=16)

    plt.subplot(6,2,12)
    plt.plot(bb[:,0], bb[:,2]/np.sum(bb[:,1]), 'ko-' )
    plt.axis([0,100,0.0,1.0])
    plt.xlabel('Both Imperfect Depth(<=)', fontsize=12)
    plt.ylabel('% of Accumulate', fontsize=16)

    fig.savefig(figureFile + '.png')
Ejemplo n.º 45
0
def normalize(images, labels):
	images = tf.cast(images, tf.float32)
	images /= 255
	return images, labels

train_dataset =  train_dataset.map(normalize)
test_dataset  =  test_dataset.map(normalize)

for image, label in test_dataset.take(1):
	break
image = image.numpy().reshape((28,28))

# Display the first 25 images, with the class name below each image
plt.figure(figsize=(10,10))
i = 0
for (image, label) in test_dataset.take(25):
	image = image.numpy().reshape((28,28))
	plt.subplot(5,5,i+1)
	plt.xticks([])
	plt.yticks([])
	plt.imshow(image, cmap=plt.cm.binary)
	plt.xlabel(class_names[label])
	i += 1
plt.savefig('./Clothing2.png')
plt.show()







Ejemplo n.º 46
0
    for step in range(1000000):

        org_im, im, em = q.get()
        ls = sess.run(train_op, feed_dict={image: im, label: em})
        print(ls)
        if step % 1 == 0:
            ls, er, out, stp, v2, v5, v1 = sess.run(
                [train_op, error, out_put, global_step, dv2, dv5, con3],
                feed_dict={
                    image: im,
                    label: em
                })

        if step % 1 == 0:
            for s in range(8):
                plt.subplot(221)
                plt.title('original')
                plt.imshow(org_im[s, :, :, :], aspect="auto")

                plt.subplot(222)
                plt.title('step1')
                plt.imshow(v1[s, :, :, 0], aspect="auto", cmap='gray')

                plt.subplot(223)
                plt.title('step2')
                plt.imshow(v2[s, :, :, 0], aspect="auto", cmap='gray')

                plt.subplot(224)
                plt.title('final')
                plt.imshow(out[s, :, :, 0], aspect="auto", cmap='gray')
                plt.show()
Ejemplo n.º 47
0
"""
===============
Demo Gridspec02
===============

"""
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec


def make_ticklabels_invisible(fig):
    for i, ax in enumerate(fig.axes):
        ax.text(0.5, 0.5, "ax%d" % (i+1), va="center", ha="center")
        ax.tick_params(labelbottom=False, labelleft=False)


fig = plt.figure()

gs = GridSpec(3, 3)
ax1 = plt.subplot(gs[0, :])
# identical to ax1 = plt.subplot(gs.new_subplotspec((0, 0), colspan=3))
ax2 = plt.subplot(gs[1, :-1])
ax3 = plt.subplot(gs[1:, -1])
ax4 = plt.subplot(gs[-1, 0])
ax5 = plt.subplot(gs[-1, -2])

fig.suptitle("GridSpec")
make_ticklabels_invisible(fig)

plt.show()
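
# A small extension sketch (not part of the original demo): GridSpec also accepts
# width_ratios/height_ratios to make the columns and rows unequal, e.g.
#     gs2 = GridSpec(2, 2, width_ratios=[2, 1], height_ratios=[1, 3])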
Ejemplo n.º 48
0
import cv2
import numpy as np
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pyplot as plt

img = cv2.imread('line5.jpeg', 0)

# Output dtype = cv2.CV_8U
sobelx8u = cv2.Sobel(img, cv2.CV_8U, 1, 0, ksize=3)
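# Note: with an unsigned 8-bit destination, negative gradients (white-to-black edges)
# are clipped to 0, so half of the edges are lost in sobelx8u.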

# Output dtype = cv2.CV_64F. Then take its absolute and convert to cv2.CV_8U
sobelx64f = cv2.Sobel(img, cv2.CV_64F, 2, 0, ksize=3)
abs_sobel64f = np.absolute(sobelx64f)
sobel_8u = np.uint8(abs_sobel64f)

plt.subplot(1, 3, 1), plt.imshow(img, cmap='gray')
plt.title('Original'), plt.xticks([]), plt.yticks([])
plt.subplot(1, 3, 2), plt.imshow(sobelx8u, cmap='gray')
plt.title('Sobel 1'), plt.xticks([]), plt.yticks([])
plt.subplot(1, 3, 3), plt.imshow(sobel_8u, cmap='gray')
plt.title('Sobel 2'), plt.xticks([]), plt.yticks([])

plt.show()
cv2.waitKey(0)
x_sample = mnist.test.next_batch(100)[0]
x_reconstruct = vae.reconstruct(x_sample)

training_epochs=10
#plotting reconstruct data
x = np.arange(0,training_epochs,1)
plt.title("Cost Graph")
plt.plot(x, new_cost)
plt.show()



#plotting the images before and after reconstruction
plt.figure(figsize=(8, 12))
for i in range(5):
    plt.subplot(5, 2, 2*i + 1)
    plt.imshow(x_sample[i].reshape(28, 28), vmin=0, vmax=1, cmap="gray")
    plt.title("Test input")
    plt.colorbar()
    plt.subplot(5, 2, 2*i + 2)
    plt.imshow(x_reconstruct[i].reshape(28, 28), vmin=0, vmax=1, cmap="gray")
    plt.title("Reconstruction")
    plt.colorbar()
plt.tight_layout()
plt.show()


#building the network to understand the latent space in 2d latent space

network_architecture = \
    dict(n_hidden_recog_1=300, # 1st layer encoder neurons
Ejemplo n.º 50
0
file = open('./weights.txt', 'w')  # parameter extraction: dump the trainable variables
for v in model.trainable_variables:
    file.write(str(v.name) + '\n')
    file.write(str(v.shape) + '\n')
    file.write(str(v.numpy()) + '\n')
file.close()

###############################################    show   ###############################################

# Plot the training and validation accuracy/loss curves
acc = history.history['sparse_categorical_accuracy']
val_acc = history.history['val_sparse_categorical_accuracy']

loss = history.history['loss']
val_loss = history.history['val_loss']

plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(acc, label='Training Accuracy')
plt.plot(val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')

plt.subplot(1, 2, 2)
plt.plot(loss, label='Training Loss')
plt.plot(val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()
Ejemplo n.º 51
0
def run_optical_flow(filepath_ind: int, param: int=500,
                     display: bool=True):

    frame_1 = cv2.imread(filepaths[filepath_ind] + 'frame1.png')[:, :, :3]
    frame_2 = cv2.imread(filepaths[filepath_ind] + 'frame2.png')[:, :, :3]

    frame_1_gray = cv2.cvtColor(frame_1, cv2.COLOR_RGB2GRAY)
    frame_2_gray = cv2.cvtColor(frame_2, cv2.COLOR_RGB2GRAY)

    # Convert to grayscale for analysis
    frame_1 = frame_1_gray.astype(float)
    frame_2 = frame_2_gray.astype(float)

    start_time = default_timer()

    if display:
        # Plot the images
        plt.figure()
        plt.imshow(frame_1, 'gray')
        plt.figure()
        plt.imshow(frame_2, 'gray')

    # Initialize kernels for finding partial derivatives of the image
    kernelx = np.array([[-1, 1], [-1, 1]])
    kernely = np.array([[-1, -1], [1, 1]])
    kernelt_1 = np.array([[1, 1], [1, 1]])
    kernelt_2 = np.array([[-1, -1], [-1, -1]])
    Ix = signal.convolve2d(frame_1, kernelx, mode='same')
    Iy = signal.convolve2d(frame_2, kernely, mode='same')
    It = signal.convolve2d(frame_1, kernelt_1, mode='same') + signal.convolve2d(frame_2, kernelt_2, mode='same')

    # Create empty u and v matrices for recursion
    u = np.zeros_like(frame_1)
    v = np.zeros_like(frame_1)

    # Alpha is the regularization (smoothness) weight
    alpha_vals = np.array([1e-10, 1e-9, 1e-8, 1e-7, 1e-6, 1e-5, 1e-4, 5e-4, 1e-3, 5e-3, 1e-2, 5e-2])
    RMSE = np.zeros_like(alpha_vals)
    count = 0
    for alpha in alpha_vals:
        u, v = horn_schunck(u, v, Ix, Iy, It, param, alpha)
        end_time = default_timer()

        # Determine run time
        duration = end_time - start_time
        clock = [int(duration // 60), int(duration % 60)]
        print('Flow estimation time was {} minutes and {} seconds'
                .format(*clock))

        # Downsample for better visuals and results
        stride = 10
        m, n = frame_1.shape
        x, y = np.meshgrid(range(n), range(m))
        x = x.astype('float64')
        y = y.astype('float64')

        # Downsampled u and v
        u_ds = u[::stride, ::stride]
        v_ds = v[::stride, ::stride]

        # Coordinates for downsampled u and v
        x_ds = x[::stride, ::stride]
        y_ds = y[::stride, ::stride]

        # Estimated flow
        estimated_flow = np.stack((u, v), axis=2)

        # Read file for ground truth flow
        ground_truth_flow = read_flow_file(filepaths[filepath_ind] + 'flow1_2.flo')
        u_gt_orig = ground_truth_flow[:, :, 0]
        v_gt_orig = ground_truth_flow[:, :, 1]
        u_gt = np.where(np.isnan(u_gt_orig), 0, u_gt_orig)
        v_gt = np.where(np.isnan(v_gt_orig), 0, v_gt_orig)

        # Downsampled u_gt and v_gt
        u_gt_ds = u_gt[::stride, ::stride]
        v_gt_ds = v_gt[::stride, ::stride]

        RMSE[count] = np.sqrt(np.sum(((u_ds - u_gt_ds) ** 2) + ((v_ds - v_gt_ds) ** 2)))
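        # Note: as written this is the square root of the *summed* squared endpoint error
        # over the downsampled grid, i.e. a root-sum-square rather than a per-pixel mean.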
        print(RMSE)
        count = count + 1
    plt.figure()
    plt.semilogx(alpha_vals, RMSE)
    plt.xlabel('Lambda Values')
    plt.ylabel('Root Mean Squared Error')
    plt.title('RMSE for Range of Lambda Values')

    if display:
        # Plot the optical flow field
        plt.figure()
        plt.subplot(1, 2, 1)
        plt.imshow(frame_2, 'gray')
        plt.quiver(x_ds, y_ds, u_ds, v_ds, color='r')
        plt.title('Estimated', fontsize='x-small')
        plt.subplot(1, 2, 2)
        plt.imshow(frame_2, 'gray')
        plt.quiver(x_ds, y_ds, u_gt_ds, v_gt_ds, color='r')
        plt.title('Ground Truth', fontsize='x-small')

    # Normalization for metric computations
    normalize = lambda im: (im - np.min(im)) / (np.max(im) - np.min(im))
    un = normalize(u)
    un_gt = normalize(u_gt)
    un_gt[np.isnan(u_gt_orig)] = 1
    vn = normalize(v)
    vn_gt = normalize(v_gt)
    vn_gt[np.isnan(v_gt_orig)] = 1

    if display:
        plt.show()

    return clock
fname = path + "/calibration_result/calibration_parameters.txt"
print(fname)
with open(fname, "w") as f:
    f.write("{'ret':"+str(ret)+", 'mtx':"+str(list(mtx))+', "dist":'+str(list(dist))+'}')
np.savetxt(imagesFolder+"calib_mtx_webcam.csv", mtx)
np.savetxt(imagesFolder+"calib_dist_webcam.csv", dist)


#imagesFolder = "data_image_detect/"

i=24 # select image id
plt.figure()
frame = cv2.imread(imagesFolder + "image_{}.jpg".format(i))
img_undist = cv2.undistort(frame,mtx,dist,None)
plt.subplot(211)
plt.imshow(frame)
plt.title("Raw image")
plt.axis("off")
plt.subplot(212)
plt.imshow(img_undist)
plt.title("Corrected image")
plt.axis("off")
plt.show()

##Use of camera calibration to estimate 3D translation and rotation of each marker on a scene
imagesFolder = "src/ar_markers/src/data_image_detect/"
frame = cv2.imread(imagesFolder + "image_10.png")
plt.figure()
plt.imshow(frame)
plt.show()
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 17 18:04:55 2018

  李立宗
"""

import numpy as np
import cv2
import matplotlib.pyplot as plt
img = cv2.imread(r'../image/lena.bmp',0)
dft = cv2.dft(np.float32(img),flags = cv2.DFT_COMPLEX_OUTPUT)
dftShift = np.fft.fftshift(dft)
result = 20*np.log(cv2.magnitude(dftShift[:,:,0],dftShift[:,:,1]))
plt.subplot(121),plt.imshow(img, cmap = 'gray')
plt.title('original'),plt.axis('off')
plt.subplot(122),plt.imshow(result, cmap = 'gray')
plt.title('result'), plt.axis('off')
plt.show()
print(dft)
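
# A minimal sketch (not part of the original example) of recovering the spatial image
# from dftShift with the inverse transform:
ishift = np.fft.ifftshift(dftShift)
imgBack = cv2.idft(ishift)
imgBack = cv2.magnitude(imgBack[:, :, 0], imgBack[:, :, 1])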
Ejemplo n.º 54
0
import cv2
import numpy as np
from matplotlib import pyplot as plt

img = cv2.imread('inu.jpg',0)
f = np.fft.fft2(img)
fshift = np.fft.fftshift(f)
magnitude_spectrum = 20*np.log(np.abs(fshift))

plt.subplot(121),plt.imshow(img, cmap = 'gray')
plt.title('Input Image'), plt.xticks([]), plt.yticks([])
plt.subplot(122),plt.imshow(magnitude_spectrum, cmap = 'gray')
plt.title('Magnitude Spectrum'), plt.xticks([]), plt.yticks([])
plt.savefig("figure1.png")
plt.show()
Ejemplo n.º 55
0
from handout import GaussianMixture1D
import scipy.stats as ss
np.random.seed(0)
gm1d = GaussianMixture1D(mode_range=(0, 50))
def trueDistributionCopied():
    data = gm1d.sample([2000])
    min_range = min(gm1d.modes) - 3 * gm1d.std_range[1]
    max_range = max(gm1d.modes) + 3 * gm1d.std_range[1]
    xs = np.linspace(min_range, max_range, 2000)
    ys = np.zeros_like(xs)
    for l, s, w in zip(gm1d.modes, gm1d.stds, gm1d.weights):
        ys += ss.norm.pdf(xs, loc=l, scale=s) * w
    plt.plot(xs, ys)
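
# The knn() helper called below is not defined in this fragment. A minimal sketch of a
# 1-D k-nearest-neighbour density estimate of that shape (an assumption, not the
# original implementation):
def knn(N, K):
    data = gm1d.sample([N])
    xs = np.linspace(min(data), max(data), 200)
    dist = np.abs(xs[:, None] - data[None, :])   # |x - x_i| for every evaluation point
    dk = np.sort(dist, axis=1)[:, K - 1]         # distance to the K-th nearest sample
    plt.plot(xs, K / (2.0 * N * dk))             # p(x) ~ K / (N * V), with V = 2*dk in 1-D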
    
ax = plt.subplot(321)
knn(200,1)
trueDistribution()

ax = plt.subplot(322)
knn(200,6)
trueDistribution()

ax = plt.subplot(323)
knn(200,15)
trueDistribution()

ax = plt.subplot(324)
knn(200,30)
trueDistribution()
Ejemplo n.º 56
0
def graph_compare(df1, df2, freq, name1, name2, DFs=None):

	t1 = []
	for d in df1['t']:
		t1.append(md.date2num(datetime.datetime.fromtimestamp(d/1000)))
	t2 = []
	for d in df2['t']:
		t2.append(md.date2num(datetime.datetime.fromtimestamp(d/1000)))
	
	fig = plt.figure(figsize=(12, 9))
	gs = gridspec.GridSpec(3, 1, height_ratios=[1, 4, 1])
	plt.rc('axes', grid=True)
	
	ax2 = plt.subplot(gs[2])
	ax2.set_title("Volume")
	ax2.set_xlim(min(t1 + t2) - 100, max(t1 + t2) + 100)
	
	ax0 = plt.subplot(gs[0], sharex=ax2)
	ax0.set_title("spread")
	ax1 = plt.subplot(gs[1], sharex=ax2)
	ax1.set_title("px")
	
	ax0.fill_between(t1, 0, df2['close'] - df1['close'], where=df2['close'] >= df1['close'], color='b', alpha=0.5)
	ax0.fill_between(t1, 0, df2['close'] - df1['close'], where=df2['close'] <= df1['close'], color='y', alpha=0.5)
	
	#plot the 1st line
	ax1.plot(t1, df1['close'], color = 'y', linewidth=2)
	
	#plot the other line
	if name1[:3] == name2[:3]:
		ax1.plot(t2, df2['close'], color = 'b', linewidth=2)
	else:
		ax1.set_ylabel(name1, color='y')
		
		axt = ax1.twinx()
		axt.plot(t2, df2['close'], color = 'b', linewidth=2)
		axt.set_ylabel(name2, color='b')		
	
	if freq > 10:
		M_fmt = md.DateFormatter('%Y %b %d', tz=pytz.timezone('Asia/Taipei'))
		m_fmt = md.DateFormatter('%b %d', tz=pytz.timezone('Asia/Taipei'))
	elif freq > 2:
		M_fmt = md.DateFormatter('%b %d', tz=pytz.timezone('Asia/Taipei'))
		m_fmt = md.DateFormatter('%H:%M', tz=pytz.timezone('Asia/Taipei'))
	else:
		M_fmt = md.DateFormatter('%H:%M', tz=pytz.timezone('Asia/Taipei'))
		m_fmt = md.DateFormatter('%H:%M:%S', tz=pytz.timezone('Asia/Taipei'))
	
	ax2.xaxis.set_major_formatter(M_fmt)
	ax2.xaxis.set_minor_formatter(m_fmt)
	ax2.set_xlabel('Yellow: ' + name1 + ' | Blue:' + name2)
	ax2.fill_between(t1, 0, df1['v'], color='y', alpha=0.5)
	ax2.fill_between(t2, 0, df2['v'], color='b', alpha=0.5)
	fig.autofmt_xdate()
	
	if DFs is not None:
		
		if len(DFs) < 10:		
			print "v1"
			for df in DFs:
				ax1.plot(t1, df['close'])
		else:
			print "v2"
			ax1.plot(t1, DFs['close'])
			
	fig.savefig(name1.replace('/','_') + '_'	+ name2.replace('/','_') + '.png')
	ax2.xaxis_date()
	ax2.autoscale_view()
	plt.show()
import wave
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns

signal_wave = wave.open('../wavs/split-10/ulrichbessler/dievorstadtkrokodile/8.wav', 'rb')
sample_rate = signal_wave.getframerate()
sig = np.frombuffer(signal_wave.readframes(signal_wave.getnframes()), dtype=np.int16)

sig = sig[:]

plt.figure(figsize=(12, 7))
sns.set_style('darkgrid')
plt.subplots_adjust(hspace=0.3)
plt.suptitle('Waveform and spectrogram of audio sample', y=0.95)

plot_a = plt.subplot(211)
plot_a.plot(sig)
plot_a.set_xlabel('sample steps')
plot_a.set_ylabel('energy')
plt.xlim(0, 160000)

plot_b = plt.subplot(212)
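# Spectrogram: 1024-sample FFT windows with 900-sample overlap; Fs maps the
# frequency bins to Hz and the time frames to seconds.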
plot_b.specgram(sig, NFFT=1024, Fs=sample_rate, noverlap=900, cmap='viridis')
plot_b.set_xlabel('seconds')
plot_b.set_ylabel('frequency')
plt.xlim(0, 10)

plt.savefig(f'../plots/waveform-spectrogram.png', dpi=300)
plt.close()
Ejemplo n.º 58
0
        show_images(images, 1, titles, label_name[y_training[index]])

    elif menu_choice == '4':
        #Scatter plot PC transformed data

        if len(X_training) == 0:
            X_training, y_training = load_dataset()

        #set scatter plot variable
        color_dict = ['yellow', 'red', 'magenta', 'blue']
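
        # compute_first_PC() (used below) is not defined in this fragment. A rough sketch
        # of what such a helper might do with scikit-learn (an assumption, not the
        # original code):
        #     from sklearn.decomposition import PCA
        #     def compute_first_PC(X, n_components):
        #         pca = PCA(n_components=n_components)
        #         return pca, pca.fit_transform(X)   # fitted model and projected data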

        #f, axarr = plt.subplots(3, sharex=True, sharey=False)
        plt.figure(1, figsize=(30, 10))
        #scatter plot data first 2 PC
        i = 0
        ax1 = plt.subplot(131)
        ax1.set_title('Scatter 2PC', ha='center')
        for name in label_name:
            class_index = label_name.index(name)
            #select all the class data from the train set and test set
            X_training_sep = X_training[y_training == class_index].copy()
            trainingdata_mean = []
            trainingdata_std = []
            X_train_pca, X_pca_transf = compute_first_PC(X_training_sep, 2)
            ax1.scatter(X_pca_transf[:, 0],
                        X_pca_transf[:, 1],
                        c=color_dict[i])
            i += 1

        #scatter plot data 3 to 4 PC
        i = 0
Ejemplo n.º 59
0
def val():
    # setup device
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print("device = {}".format(device))
    # load data

    demo_path = os.path.join("demo")
    if not os.path.isdir(demo_path):
        os.mkdir(demo_path)

    imgs_path_list = img_loader(demo_path)

    # set model
    pretrain_model = VGGNet(requires_grad=True, remove_fc=True)
    model = FCNs(pretrained_net=pretrain_model, n_class=n_class)
    print("model loading success...")

    # set model running devices
    model.to(device)
    model = torch.nn.DataParallel(model, device_ids=range(torch.cuda.device_count()))
    print("usable gpu num: {}".format(torch.cuda.device_count()))

    # load checkpoints
    load_ckpt_path = os.path.join("checkpoints",load_ckpt_name)

    if torch.cuda.is_available():
        checkpoint = torch.load(load_ckpt_path)
    else:
        checkpoint = torch.load(load_ckpt_path, map_location='cpu')

    model.load_state_dict(checkpoint['model_state'])
    last_best_iou = checkpoint['best_iou']
    start_epoch = checkpoint['epoch']

    print('Checkpoint resume success... last iou:{:.4%}'.format(last_best_iou))
    time_s = time.time()
    model.eval()
    with torch.no_grad():

        images = []
        for i, img_path in zip(range(len(imgs_path_list)), imgs_path_list):
            #image = plt.imread(img_path)
            image = Image.open(img_path)
            image = image.resize((1024, 512), Image.ANTIALIAS)
            image = np.array(image) / 255.
            image = image[:,:,::-1]  # RGB => BGR
            images.append(image)

        images = np.array(images,dtype=np.float32)
        images = images.transpose([0,3,1,2])
        images_tensor = torch.tensor(images, dtype=torch.float32)
        images_tensor = images_tensor.to(device)
        outputs_val = model(images_tensor)
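        # .max(1) returns (values, indices) along the class dimension; [1] keeps the
        # per-pixel argmax, i.e. the predicted class index map for each image.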
        pred = outputs_val.data.max(1)[1].cpu().numpy()

        for i in range(len(imgs_path_list)):
            plt.figure(i)
            plt.subplot(2,2,1)
            plt.title("Origin image")
            plt.imshow(images[i].transpose([1,2,0])[:,:,::-1])

            rgb_img = index2color(pred[i, :, :])

            plt.subplot(2,2,2)
            plt.title("Semantic Segmentation Predict, mIoU:{:.2%}".format(last_best_iou))
            plt.imshow(rgb_img.astype(int))

            plt.subplot(2,2,3)
            # show color2class bar
            range_cmap = [[i for i in range(n_class)]]
            # custom colormap
            c_map = mpl.colors.LinearSegmentedColormap.from_list('cmap', np.array(get_color_index()[:n_class]) / 255., 256)
            plt.imshow(range_cmap, cmap=c_map)
            plt.xticks(range_cmap[0],
                       ['Void', 'Road', 'Construction', 'Traffic light', 'Nature', 'Sky', 'Person', 'Vehicle'],
                       rotation=50)
        print("time used per image:{:.3}s ".format((time.time() - time_s)/len(imgs_path_list)))
        plt.show()
Ejemplo n.º 60
0
pars = ['STATURE', 'BIDELTOID_BRTH', 'THUMB-TIP_REACH']

orioncol = 'orange'
isscol = 'coral'

# Percentile limits (5th percentile Japanese Female, 95th percentile American male)
jf = np.array([[61.8, 15.3, 28.2],
				[1.9454,	0.7903,	1.5198]])
am = np.array([[70.8,	19.3,	32.1],
				[2.4318,	1.0335,	1.5806]])

ansur = [ansur_men, ansur_women]
plt.figure(figsize=(15, 15))
for i in range(3):
    for j in range(2):
        plt.subplot(3, 2, 2*(i+1)+j-1)
        anth = [df.crew_height, df.crew_shoulder, df.crew_thumb][i][subcat_gender.ix[:, j+1]]
        anth.hist(normed=True, label='Test Data')
        (ansur[j][pars[i]]/25.4).hist(normed=True, histtype='step', lw=3, label='ANSUR Ref. Data')
        plt.title(['Male', 'Female'][j]+' '+['Stature', 'Bideltoid Breadth', 'Thumb-tip Reach'][i])
        stat, pval = stats.ttest_ind(anth, ansur[j][pars[i]]/25.4)
        bias = anth.mean()-(ansur[j][pars[i]]/25.4).mean()
        plt.annotate('Bias: %3.1f $\pm$ %3.1f in \np-value: %3.2f' % (bias, bias/stat, pval), (1, 1),
                     xycoords='axes fraction', va='top', ha='right',
                     bbox=dict(boxstyle="round", lw=0, fc='white', alpha=0.75))

        plt.xlabel("in")
        plt.ylabel("Density (in$^{-1}$)")
        if 2*(i+1)+j-1 == 1:
            plt.legend(loc=2, fontsize='small')
        plt.subplots_adjust(hspace=0.5, wspace=0.3, left=0.1)
        #mark orion and iss limits