Example #1
def EnhanceContrast(g, r=3, op_kernel=15, silence=True):
    
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,(op_kernel,op_kernel))
    opening = cv2.morphologyEx(g, cv2.MORPH_OPEN, kernel)
    
    g_copy = np.asarray(np.copy(g), dtype=float)

    m_f = np.mean(opening)
        
    u_max = 245; u_min = 10; t_min = np.min(g); t_max = np.max(g)

    idx_gt_mf = np.where(g_copy > m_f)
    idx_lt_mf = np.where(g_copy <= m_f)

    g_copy[idx_gt_mf] = -0.5 * ((u_max-u_min) / (m_f-t_max)**r) * (g_copy[idx_gt_mf]-t_max)**r + u_max
    g_copy[idx_lt_mf] = 0.5 * ((u_max-u_min) / (m_f-t_min)**r) * (g_copy[idx_lt_mf]-t_min)**r + u_min 

    if not silence:
        plt.subplot(1,2,1)
        plt.imshow(g, cmap='gray')
        plt.title('Original image')
        plt.subplot(1,2,2)
        plt.imshow(g_copy, cmap='gray')
        plt.title('Enhanced image')
        plt.show()
        
    return g_copy
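A minimal usage sketch, assuming the module-level cv2, numpy (np) and matplotlib.pyplot (plt) imports that EnhanceContrast relies on; 'input.png' is an illustrative path, not one from the original code:

import cv2
import numpy as np

g = cv2.imread('input.png', cv2.IMREAD_GRAYSCALE)   # load as a grayscale array
enhanced = EnhanceContrast(g, r=3, op_kernel=15, silence=False)
# The piecewise mapping can overshoot [0, 255] slightly, so clip before saving as 8-bit.
cv2.imwrite('enhanced.png', np.clip(enhanced, 0, 255).astype(np.uint8))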
Example #2
 def plot_bernoulli_matrix(self, show_npfs=False):
   """
   Plot the heatmap of the Bernoulli matrix 
   @self
   @show_npfs - Highlight NPFS detections [Boolean] 
   """
   matrix = self.Bernoulli_matrix
   if show_npfs == False:
     plot = plt.imshow(matrix)
     plot.set_cmap('hot')
     plt.colorbar()
     plt.xlabel("Bootstraps")
     plt.ylabel("Feature")
     plt.show()
   else:
     for i in self.selected_features:
       for k in range(len(matrix[i])):
         matrix[i,k] = .5
     plot = plt.imshow(matrix)
     plot.set_cmap('hot')
     plt.xlabel("Bootstraps")
     plt.ylabel("Feature")
     plt.colorbar()
     plt.show()
   return None
Example #3
def plot_ohlc(df, maDay=5, maType='simple', **kwarg):
    """
    df: pandas DataFrame
        generated from yahoo_finance or it need to have these
        five columns:
        'Open', 'High', 'Low', 'Close', 'Volume'
    maDay: int
        number of days to do moving average
    maType: string
        'simple' or "exp"
    """
    # set default and unpack kwarg
    opt = {
        "title" : "Historical data",
        "xlabel" : "",
        "ylabel" : "Price",
        "lowerVolume" : 0,
        'colorup' : 'r',
        'colordown' : 'g'
    }
    opt.update(kwarg)
    
    # filter days when the market is not open.
    df = df[df['Volume']>opt['lowerVolume']].copy()
    
    # initialise figures
    fig, (ax1, ax2) = plt.subplots(nrows=2, sharex=True, figsize=(8,8))
    # adjust plot sizes
    l1, b1, w1, h1 = 0.10, 0.30, 0.85, 0.60 # top plot
    l2, b2, w2, h2 = 0.10, 0.10, 0.85, 0.20 # bottom plot
    ax1.set_position([l1, b1, w1, h1])
    ax2.set_position([l2, b2, w2, h2])

    # convert to mdates and plot volumes
    df['mdates'] = mdates.date2num(df.index.to_pydatetime())
    df.plot(x='mdates', y='Volume', ax=ax2, legend=False, drawstyle='steps')
    ax2.set_yscale("log")
    ax2.set_ylabel("Volume")
    ax2.xaxis.set_major_formatter(mdates.DateFormatter('%d\n%b\n%Y'))
    
    # plot candlesticks
    sticks = candlestick_ohlc(
               ax1, df[['mdates', 'Open', 'High', 'Low', 'Close']].values,
               colorup=opt['colorup'], colordown=opt['colordown'],
               width=0.8, alpha=0.7)
    
    # compute the median price
    df['median'] = df[['Open', 'High', 'Low', 'Close']].median(axis=1)
    df.plot(x='mdates', y='median', ax=ax1, kind='scatter', c='k', marker='_')
    # moving average
    maLabel = "{:d}D Moving Average".format(maDay)
    df['MA'] = moving_average(df['median'], maDay, maType) # true MA
    df[maLabel] = df['MA']+df['median'].mean()*0.05
    df.plot(x='mdates', y=maLabel, ax=ax1, c='m')
    
    # set title and other stuff
    ax1.set_title(opt["title"])
    ax1.set_ylabel(opt["ylabel"])
    ax2.set_xlabel(opt["xlabel"])
    plt.show()
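A hedged usage sketch with synthetic data, assuming the module-level imports plot_ohlc relies on (pandas as pd, numpy as np, matplotlib.dates as mdates, candlestick_ohlc from the old mpl_finance package, and the moving_average helper used above):

import numpy as np
import pandas as pd

# Purely illustrative daily OHLCV data on a business-day index.
idx = pd.date_range('2020-01-01', periods=60, freq='B')
close = 100 + np.cumsum(np.random.randn(len(idx)))
df = pd.DataFrame({'Open': close + 0.5,
                   'High': close + 1.0,
                   'Low': close - 1.0,
                   'Close': close,
                   'Volume': np.random.randint(1000, 10000, len(idx))},
                  index=idx)
plot_ohlc(df, maDay=10, maType='simple', title='Synthetic data')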
Example #4
def plot_cone():
    '''
    '''
    from base import plot as pf

    c = cone_hc()
    stimulus,  response = c.simulate()

    fig  = plt.figure()
    fig.set_tight_layout(True)
    #ax1 = fig.add_subplot(211)
    ax2 = fig.add_subplot(111)
    pf.AxisFormat()
    #pf.TufteAxis(ax1,  ['left'],  [5,  5])
    pf.TufteAxis(ax2,  ['left',  'bottom'],  [5,  5])

    #ax1.semilogy(stimulus,  'k')
    ax2.plot(response,  'k')
    ax2.plot((stimulus / np.max(stimulus)) * 31, 'k')

    #ax1.set_xlim([0, 2000])
    ax2.set_xlim([0, 3000])
    ax2.set_ylim([30, 40])
    #ax1.set_ylabel('luminance (td)')
    ax2.set_ylabel('normalized response')
    ax2.set_xlabel('time')
    plt.show()
Example #5
def display_grid(grid, **kwargs):
    fig = plt.figure()
    plt.axes().set_aspect('equal')

    if kwargs.get('mark_core_cells', True):
        core_cell_coords = grid._cell_nodes[1:-1, 1:-1]
        cellx, celly = core_cell_coords[:, :, 0], core_cell_coords[:, :, 1]
        plt.plot(cellx, celly, '-o', np.transpose(cellx), np.transpose(celly), '-o', color='red')

    if kwargs.get('mark_boundary_cells', True):
        boundary_cell_coords = grid._cell_nodes[0, :], \
                               grid._cell_nodes[-1, :], \
                               grid._cell_nodes[1:-1, 0], \
                               grid._cell_nodes[1:-1, -1]

        for coords in boundary_cell_coords:
            plt.plot(coords[:, 0], coords[:, 1], '-x', color='blue')

    if kwargs.get('show', False):
        plt.show()

    f = BytesIO()
    plt.savefig(f)

    return f
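A brief usage sketch; 'grid' is hypothetical here and stands for whatever object exposes the _cell_nodes coordinate array indexed above, with matplotlib.pyplot (plt) and io.BytesIO imported at module level:

# display_grid returns a BytesIO holding the rendered figure, so the PNG bytes
# can be written to disk or served directly.
buf = display_grid(grid, mark_core_cells=True, mark_boundary_cells=True, show=False)
with open('grid.png', 'wb') as fh:
    fh.write(buf.getvalue())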
Example #6
  def __init__(self, frames, z, zeta, sweep):
    from scitbx import simplex
    from scitbx.array_family import flex
    import numpy
    self.L = Likelihood(FractionOfObservedIntensity(frames, z, zeta, sweep.get_scan()))

    x = 0.1 + numpy.arange(1000) / 2000.0
    l = [self.L(xx) for xx in x]
    from matplotlib import pylab
    pylab.plot(x, l)
    pylab.show()

   # print 1/0

    startA = 0.3
    startB = 0.4
#        startA = 0.2*3.14159 / 180
#        startB = 0.3*3.14159 / 180

    print "Start: ", startA, startB
    starting_simplex=[flex.double([startA]), flex.double([startB])]
#        for ii in range(2):
#            starting_simplex.append(flex.double([start]))#flex.random_double(1))

    self.optimizer = simplex.simplex_opt(
        1, matrix=starting_simplex, evaluator=self, tolerance=1e-7)
Example #7
def bo_(x_obs, y_obs):
    kernel = kernels.Matern() + kernels.WhiteKernel()
    gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=16)
    gp.fit(x_obs, y_obs)

    xs = list(repeat(np.atleast_2d(np.linspace(0, 10, 128)).T, 2))
    x = cartesian_product(*xs)

    a = a_EI(gp, x_obs=x_obs, y_obs=y_obs)

    argmin_a_x = x[np.argmax(a(x))]

    # heavy evaluation
    print("f({})".format(argmin_a_x))
    f_argmin_a_x = f2d(np.atleast_2d(argmin_a_x))


    plot_2d(gp, x_obs, y_obs, argmin_a_x, a, xs)
    plt.show()
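    # Note: the recursive call below appends the newly evaluated point to the
    # observations and starts over; as written there is no stopping criterion,
    # so this runs until interrupted or Python's recursion limit is reached.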


    bo_(
        x_obs=np.vstack((x_obs, argmin_a_x)),
        y_obs=np.hstack((y_obs, f_argmin_a_x)),
    )
Example #8
def compareFrequencies():
	times = generateTimes(sampleFreq, numSamples)
	signal = (80.0, 0.1)
	coherent = (60.0, 1.0)
	incoherent = (60.1, 1.0)
	highFNoise = (500.0, 0.01)
	timeData = generateTimeDomain(times, [signal, coherent, highFNoise])
	timeData2 = generateTimeDomain(times, [signal, incoherent, highFNoise])
	#timeData3 = generateTimeDomain(times, [signal, highFNoise])
	
	#timeData = generateTimeDomain(times, [(60.0, 1.0)])
	#timeData2 = generateTimeDomain(times, [(61.0, 1.0)])
	
	roi = (0, 20)
	
	freqData = list(map(toDb, map(dtype, map(absolute, fourier(timeData)))))[roi[0]:roi[1]]
	freqData2 = list(map(toDb, map(dtype, map(absolute, fourier(timeData2)))))[roi[0]:roi[1]]
	#freqData3 = map(toDb, map(dtype, map(absolute, fourier(timeData3))))[roi[0]:roi[1]]
	
	frequencies = generateFFTFrequencies(sampleFreq, numSamples)[roi[0]:roi[1]]
	
	#pylab.subplot(111)
	pylab.plot(frequencies, freqData)
	
	#pylab.subplot(112)
	pylab.plot(frequencies, freqData2)
	
	#pylab.plot(frequencies, freqData3)
	
	pylab.grid(True)
	pylab.show()
Example #9
 def plot_values(self, TITLE, SAVE):
     plot(self.list_of_densities, self.list_of_pressures)
     title(TITLE)
     xlabel("Densities")
     ylabel("Pressure")
     savefig(SAVE)
     show()
Example #10
def plotConfusionLines(deficit='tritan', clip=True):
    '''add confusion lines
        '''
    space = colorSpace(fundamental='neitz', LMSpeaks=[559, 530, 417])
    space.plotColorSpace()
    space.find_copunctuals()
    print(deficit, ': ', space.copunctuals[deficit])
    
    if deficit.lower() == 'deutan' or deficit.lower() == 'protan':
        lambdas = [420, 460, 470, 480, 490, 500, 515,]
    elif deficit.lower() == 'tritan':
        lambdas = [420, 460, 480, 500, 520, 535, 545, 555,
                   570, 585, 600, 625, 700]
    
    space.cs_ax.plot(space.copunctuals[deficit][0],
                    space.copunctuals[deficit][1], 'ko', markersize=8)
    for lam in lambdas:
        space.cs_ax.plot([space.find_testLightMatch(lam)[0],
                         space.copunctuals[deficit][0]],
                        [space.find_testLightMatch(lam)[1],
                         space.copunctuals[deficit][1]],
                        'k-', linewidth=1)   
    
    space.cs_ax.text(0.7, 1, deficit, fontsize=18)
    if clip is True:                
        space.cs_ax.set_xlim([-0.4, 1.2])
        space.cs_ax.set_ylim([-0.2, 1.2])
    plt.show()                 
Example #11
def plotConeSpace():
    '''
    '''
    space = colorSpace(fundamental='neitz',
                             LMSpeaks=[559.0, 530.0, 421.0])
    space.plotColorSpace(space.Lnorm, space.Mnorm, space.spectrum)
    plt.show()
Example #12
def viz_docwordfreq_sidebyside(P1, P2, title1='', title2='',
                                vmax=None, aspect=None, block=False):
  from matplotlib import pylab
  pylab.figure()

  if vmax is None:
    vmax = 1.0
    P1limit = np.percentile(P1.flatten(), 97)
    if P2 is not None:
      P2limit = np.percentile(P2.flatten(), 97)
    else:
      P2limit = P1limit
    while vmax > P1limit and vmax > P2limit:
      vmax = 0.8 * vmax

  if aspect is None:
    aspect = float(P1.shape[1])/P1.shape[0]
  pylab.subplot(1, 2, 1)
  pylab.imshow(P1, aspect=aspect, interpolation='nearest', vmin=0, vmax=vmax)
  if len(title1) > 0:
    pylab.title(title1)
  if P2 is not None:
    pylab.subplot(1, 2, 2)
    pylab.imshow(P2, aspect=aspect, interpolation='nearest', vmin=0, vmax=vmax)
    if len(title2) > 0:
      pylab.title(title2)
  pylab.show(block=block)
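A self-contained usage sketch, assuming numpy is imported as np at module level (the percentile computation above requires it):

import numpy as np

P1 = np.random.rand(40, 60)   # e.g. document-word frequencies for corpus A
P2 = np.random.rand(40, 60)   # corpus B
viz_docwordfreq_sidebyside(P1, P2, title1='corpus A', title2='corpus B', block=True)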
Example #13
def plot_dichromatic_system(hybrid='ls', clip=True):
    '''
    '''
    space = colorSpace(fundamental='neitz', LMSpeaks=[559, 530, 421])
    space.plotColorSpace()
    
    for x in np.arange(0, 1.1, 0.1):
        if hybrid.lower() == 'ls' or hybrid.lower() == 'sl':
            s = x
            m = 0
            l = -(1.0 - x)
        elif hybrid.lower() == 'lm' or hybrid.lower() == 'ml':
            s = 0
            m = x
            l = -(1.0 - x)
        elif hybrid.lower() == 'ms' or hybrid.lower() == 'sm':
            s = x
            m = -(1.0 - x)
            l = 0
        else:
            raise InputError('hybrid must be ls, lm or ms')

        copunct = space.lms_to_rgb([l, m, s])
        neutral_points = space.find_spect_neutral(copunct)
        for neut in neutral_points:
            space.cs_ax.plot([neut[0], copunct[0]], [neut[1], copunct[1]], 
                             '-o', c=(np.abs(l), np.abs(m), np.abs(s)), 
                             markersize=8, linewidth=2)

    if clip is True:                
        space.cs_ax.set_xlim([-0.4, 1.2])
        space.cs_ax.set_ylim([-0.2, 1.2])
    
    plt.show()
Example #14
def plotGetRetangle():
    """ Area selection from selected pen.
    """
    selRect = []
    if len(ds.EpmDatasetAnalysisPens.SelectedPens) != 1:
        sr.msgBox('EPM Python Plugin - Demo Tools', 'Please select a single pen before applying this function!', 'Warning')
        return 0
    epmData = ds.EpmDatasetAnalysisPens.SelectedPens[0].values
    y = epmData['Value'].copy()
    x = np.arange(len(y))
    fig, current_ax = pl.subplots()
    pl.plot(x, y, lw=2, c='g', alpha=.3)

    def line_select_callback(eclick, erelease):
        'eclick and erelease are the press and release events'
        x1, y1 = eclick.xdata, eclick.ydata
        x2, y2 = erelease.xdata, erelease.ydata
        print ("\n(%3.2f, %3.2f) --> (%3.2f, %3.2f)" % (x1, y1, x2, y2))
        selRect.append((int(x1), y1, int(x2), y2))

    def toggle_selector(event):
        if event.key in ['Q', 'q'] and toggle_selector.RS.active:
            toggle_selector.RS.set_active(False)
        if event.key in ['A', 'a'] and not toggle_selector.RS.active:
            toggle_selector.RS.set_active(True)
    toggle_selector.RS = RectangleSelector(current_ax, line_select_callback, drawtype='box', useblit=True, button=[1,3], minspanx=5, minspany=5, spancoords='pixels')
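    # Note: recent Matplotlib releases deprecated and later removed the drawtype
    # keyword of RectangleSelector; on those versions drop drawtype='box'
    # (a box is the default shape).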
    pl.connect('key_press_event', toggle_selector)
    pl.show()
    return selRect
Example #15
def plot_grid_experiment_results(grid_results, params, metrics):
    global plt
    params = sorted(params)
    grid_params = grid_results.grid_params
    plt.figure(figsize=(8, 6))
    for metric in metrics:
        grid_params_shape = [len(grid_params[k]) for k in sorted(grid_params.keys())]
        params_max_out = [(1 if k in params else 0) for k in sorted(grid_params.keys())]
        results = np.array([e.results.get(metric, 0) for e in grid_results.experiments])
        results = results.reshape(*grid_params_shape)
        for axis, included_in_params in enumerate(params_max_out):
            if not included_in_params:
                results = np.apply_along_axis(np.max, axis, results)

        print(results)
        params_shape = [len(grid_params[k]) for k in sorted(params)]
        results = results.reshape(*params_shape)

        if len(results.shape) == 1:
            results = results.reshape(-1,1)
        import matplotlib.pylab as plt

        #f.subplots_adjust(left=.2, right=0.95, bottom=0.15, top=0.95)
        plt.imshow(results, interpolation='nearest', cmap=plt.cm.hot)
        plt.title(str(grid_results.name) + " " + metric)

        if len(params) == 2:
            plt.xticks(np.arange(len(grid_params[params[1]])), grid_params[params[1]], rotation=45)
        plt.yticks(np.arange(len(grid_params[params[0]])), grid_params[params[0]])
        plt.colorbar()
        plt.show()
Example #16
    def plot(self, bit_stream):
        if self.previous_bit_stream != bit_stream.to_list():
            self.previous_bit_stream = bit_stream.to_list()

            x = []
            y = []
            bit = None

            for bit_time in bit_stream.to_list():
                if bit is None:
                    x.append(bit_time)
                    y.append(0)
                    bit = 0
                elif bit == 0:
                    x.extend([bit_time, bit_time])
                    y.extend([0, 1])
                    bit = 1
                elif bit == 1:
                    x.extend([bit_time, bit_time])
                    y.extend([1, 0])
                    bit = 0

            plt.clf()
            plt.plot(x, y)
            plt.xlim([0, 10000])
            plt.ylim([-0.1, 1.1])
            plt.show()
            plt.pause(0.005)
Example #17
def plotGetSelection():
    """ Get data range from selected pen.
    """
    selData = []
    if len(ds.EpmDatasetAnalysisPens.SelectedPens) != 1:
        sr.msgBox('EPM Python Plugin - Demo Tools', 'Please select a single pen before applying this function!', 'Warning')
        return 0
    epmData = ds.EpmDatasetAnalysisPens.SelectedPens[0].values
    y = epmData['Value'].copy()
    x = np.arange(len(y))
    fig = pl.figure(figsize=(8,6))
    ax = fig.add_subplot(211, facecolor='#FFFFCC')
    ax.plot(x, y, '-')
    ax.set_title('Press left mouse button and drag to test')
    ax2 = fig.add_subplot(212, facecolor='#FFFFCC')
    line2, = ax2.plot(x, y, '-')

    def onselect(xmin, xmax):
        selData.append([xmin, xmax])
        indmin, indmax = np.searchsorted(x, (xmin, xmax))
        indmax = min(len(x)-1, indmax)
        thisx = x[indmin:indmax]
        thisy = y[indmin:indmax]
        line2.set_data(thisx, thisy)
        ax2.set_xlim(thisx[0], thisx[-1])
        ax2.set_ylim(thisy.min(), thisy.max())
        fig.canvas.draw()

    span = SpanSelector(ax, onselect, 'horizontal', useblit=True, rectprops=dict(alpha=0.5, facecolor='red') )
    pl.show()
    return selData
Example #18
	def plot_cell(self,cell_number=0,label='insert_label'):

		current_cell = self.cell_list[cell_number]
		temp = current_cell.temp
		cd_signal = current_cell.cd_signal
		cd_calc = current_cell.cd_calc()
		
		ax = pylab.gca()

		pylab.plot(temp,cd_signal,'o',color='black')
		pylab.plot(temp,cd_calc,color='black')
		pylab.xlabel(r'Temperature ($^{\circ}$C)')
		pylab.ylabel('mdeg')
		pylab.ylim([-25,-4])
		dH = numpy.round(current_cell.dH, decimals=1)
		Tm = numpy.round(current_cell.Tm-273.15, decimals=1)
		nf = current_cell.nf
		nu = current_cell.nu
		textstr_dH = '${\Delta}H_{m}$ = %.1f kcal/mol' %dH
		textstr_Tm ='$T_{m}$ = %.1f $^{\circ}$C' %Tm
		textstr_nf ='$N_{folded}$ = %d' %nf
		textstr_nu ='$N_{unfolded}$ = %d'%nu
		ax.text(8,-6,textstr_dH, fontsize=16,ha='left',va='top')
		ax.text(8,-7.5,textstr_Tm, fontsize=16,ha='left',va='top')
		ax.text(8,-9,textstr_nf, fontsize=16,ha='left',va='top')
		ax.text(8,-10.5,textstr_nu, fontsize=16,ha='left',va='top')
		pylab.title(label)		
		pylab.show()

		return
Example #19
def plotSpecSens(plot_norm=False, log=True):
    '''
    '''
    Lnorm, Mnorm, Snorm = genLMS(spectrum, filters, 
        fundamental='neitz', LMSpeaks=[559, 530, 419])
    L, M, S = genLMS(spectrum, filters, remove_filters=False,
        fundamental='neitz', LMSpeaks=[559, 530, 419])

    fig = plt.figure()
    fig.set_tight_layout(True)
    ax = fig.add_subplot(111)
    pf.AxisFormat()
    pf.TufteAxis(ax, ['left', 'bottom'], Nticks=[5, 5])

    if not log:
        ax.plot(spectrum, L, 'r-')
        ax.plot(spectrum, M, 'g-')
        ax.plot(spectrum, S, 'b-')
        ax.set_ylim([-0.01, 1.01])
    else:
        ax.semilogy(spectrum, L, 'r-')
        ax.semilogy(spectrum, M, 'g-')
        ax.semilogy(spectrum, S, 'b-')
        ax.set_ylim([10 ** -4, 10 ** -0])
    
    if plot_norm:
        ax.plot(spectrum, Snorm, 'b', linewidth=2)
        ax.plot(spectrum, Mnorm, 'g', linewidth=2)
        ax.plot(spectrum, Lnorm, 'r', linewidth=2)

    ax.set_xlim([380, 781])
    ax.set_xlabel('wavelength (nm)')
    ax.set_ylabel('sensitivity')
    plt.show()
Example #20
def viz_birth_proposal_2D(curModel, newModel, ktarget, freshCompIDs,
                          title1='Before Birth',
                          title2='After Birth'):
  ''' Create before/after visualization of a birth move (in 2D)
  '''
  from ..viz import GaussViz, BarsViz
  from matplotlib import pylab

  fig = pylab.figure()
  h1 = pylab.subplot(1,2,1)

  if curModel.obsModel.__class__.__name__.count('Gauss'):
    GaussViz.plotGauss2DFromHModel(curModel, compsToHighlight=ktarget)
  else:
    BarsViz.plotBarsFromHModel(curModel, compsToHighlight=ktarget, figH=h1)
  pylab.title(title1)
    
  h2 = pylab.subplot(1,2,2)
  if curModel.obsModel.__class__.__name__.count('Gauss'):
    GaussViz.plotGauss2DFromHModel(newModel, compsToHighlight=freshCompIDs)
  else:
    BarsViz.plotBarsFromHModel(newModel, compsToHighlight=freshCompIDs, figH=h2)
  pylab.title(title2)
  pylab.show(block=False)
  try: 
    x = input('Press any key to continue >>')
  except KeyboardInterrupt:
    import sys
    sys.exit(-1)
  pylab.close()
Example #21
 def plot_fft(self,b):
     a = len(self.fullfft_dft_py_fc_0.output)
     
     for i in range(0,b):
         self.frq.append(i)
     plt.plot(self.frq,self.fullfft_dft_py_fc_0.output)
     plt.show()
Example #22
def _fig_density(sweight, surweight, pval, nlm):
    """
    Plot the histogram of sweight across the image
    and the thresholds implied by the surrogate model (surweight)
    """
    import matplotlib.pylab as mp
    # compute some thresholds
    nlm = nlm.astype('d')
    srweight = np.sum(surweight,1)
    srw = np.sort(srweight)
    nitem = np.size(srweight)
    thf = srw[int((1-min(pval,1))*nitem)]
    mnlm = max(1,nlm.mean())
    imin = min(nitem-1,int((1.-pval/mnlm)*nitem))
    
    thcf = srw[imin]
    h,c = np.histogram(sweight,100)
    I = h.sum()*(c[1]-c[0])
    h = h/I
    h0,c0 = np.histogram(srweight,100)
    I0 = h0.sum()*(c0[1]-c0[0])
    h0 = h0/I0
    mp.figure(1)
    # np.histogram returns bin edges (one more than counts); plot against the left edges
    mp.plot(c[:-1], h)
    mp.plot(c0[:-1], h0)
    mp.legend(('true histogram','surrogate histogram'))
    mp.plot([thf,thf],[0,0.8*h0.max()])
    mp.text(thf,0.8*h0.max(),'p<0.2, uncorrected')
    mp.plot([thcf,thcf],[0,0.5*h0.max()])
    mp.text(thcf,0.5*h0.max(),'p<0.05, corrected')
    mp.savefig('/tmp/histo_density.eps')
    mp.show()
Example #23
def item_nbr_tendency(store_nbr):
    '''
    input : store_nbr
    output : graph representing units grouped by year and month
    '''
    store = df_1[df_1['store_nbr'] == store_nbr]

    pivot = store.pivot_table(index=['year','month'],columns='item_nbr',values='units',aggfunc=np.sum)
    zero_index = pivot==0
    pivot = pivot[pivot!=0].dropna(axis=1,how='all')
    pivot[zero_index]=0
    
    
    pivot_2012 = pivot.loc[2012]
    pivot_2013 = pivot.loc[2013]
    pivot_2014 = pivot.loc[2014]
    
    plt.figure(figsize=(12,8))
    plt.subplot(131)
    sns.heatmap(pivot_2012,cmap="YlGnBu", annot = True, fmt = '.0f')
    plt.subplot(132)
    sns.heatmap(pivot_2013,cmap="YlGnBu", annot = True, fmt = '.0f')
    plt.subplot(133)
    sns.heatmap(pivot_2014,cmap="YlGnBu", annot = True, fmt = '.0f')
    plt.show()
Example #24
def plot_q(model='cem', r_min=0.0, r_max=6371.0, dr=1.0):
    """
    Plot a radially symmetric Q model.

    plot_q(model='cem', r_min=0.0, r_max=6371.0, dr=1.0):

    r_min=minimum radius [km], r_max=maximum radius [km], dr=radius
    increment [km]

    Currently available models (model): cem, prem, ql6
    """
    import matplotlib.pylab as plt

    r = np.arange(r_min, r_max + dr, dr)
    q = np.zeros(len(r))

    for k in range(len(r)):

        if model == 'cem':
            q[k] = q_cem(r[k])
        elif model == 'ql6':
            q[k] = q_ql6(r[k])
        elif model == 'prem':
            q[k] = q_prem(r[k])

    plt.plot(r, q, 'k')
    plt.xlim((0.0, r_max))
    plt.xlabel('radius [km]')
    plt.ylabel('Q')
    plt.show()
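A brief usage sketch, assuming the q_cem, q_ql6 and q_prem helpers referenced above are importable from the same module and that numpy is available as np:

# Plot the PREM attenuation model from the centre of the Earth to the surface,
# sampled every 10 km.
plot_q(model='prem', r_min=0.0, r_max=6371.0, dr=10.0)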
Example #25
def item_nbr_tendency_finely(store_nbr, year, month_start=-1, month_end=-1, graph=True):
    '''
    input
        1. store_nbr = store number
        2. year = year
        3. month_start = first month
        4. month_end = last month
        5. graph = whether to plot an item_nbr heatmap for the selection above

    output
        1. pivot table of item_nbr filtered by store_nbr, year and month
    '''
    store = df_1[(df_1['store_nbr'] == store_nbr) &
                 (df_1['year'] == year)]

    if month_start != -1:
        if month_end == -1:
            month_end = month_start + 1
        store = store[(month_start <= store['month']) & (store['month'] < month_end)]

    pivot = store.pivot_table(index='item_nbr',
                              columns='date',
                              values='units',
                              aggfunc=np.sum)

    zero_index = pivot == 0
    pivot = pivot[pivot != 0].dropna(axis=0, how='all')
    pivot[zero_index] = 0

    if graph:
        plt.figure(figsize=(12, 8))
        sns.heatmap(pivot, cmap="YlGnBu", annot=True, fmt='.0f')
        plt.show()

    return pivot
Example #26
def flipPlot(minExp, maxExp):
    """假定minEXPy和maxExp是正整数且minExp<maxExp
    绘制出2**minExp到2**maxExp次抛硬币的结果
    """
    ratios = []
    diffs = []
    aAxis = []
    for i in range(minExp, maxExp+1):
        aAxis.append(2**i)
    for numFlips in aAxis:
        numHeads = 0
        for n in range(numFlips):
            if random.random() < 0.5:
                numHeads += 1
        numTails = numFlips - numHeads
        ratios.append(numHeads/numTails)
        diffs.append(abs(numHeads-numTails))
    plt.figure()
    ax1 = plt.subplot(121)
    plt.title("Difference Between Heads and Tails")
    plt.xlabel('Number of Flips')
    plt.ylabel('Abs(#Heads - #Tails)')
    ax1.semilogx(aAxis, diffs, 'bo')
    ax2 = plt.subplot(122)
    plt.title("Heads/Tails Ratios")
    plt.xlabel('Number of Flips')
    plt.ylabel("#Heads/#Tails")
    ax2.semilogx(aAxis, ratios, 'bo')
    plt.show()
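A short usage sketch, assuming the module-level random and matplotlib.pyplot (plt) imports that flipPlot relies on:

import random

random.seed(0)    # make the simulated flips reproducible
flipPlot(4, 20)   # simulate 2**4 up to 2**20 coin flips per data point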
Example #27
def plotting():
    # plt.ion()
    countries = ['France', 'Spain', 'Sweden', 'Germany', 'Finland', 'Poland',
                 'Italy',
                 'United Kingdom', 'Romania', 'Greece', 'Bulgaria', 'Hungary',
                 'Portugal', 'Austria', 'Czech Republic', 'Ireland',
                 'Lithuania', 'Latvia',
                 'Croatia', 'Slovakia', 'Estonia', 'Denmark', 'Netherlands',
                 'Belgium']
    extensions = [547030, 504782, 450295, 357022, 338145, 312685, 301340,
                  243610, 238391,
                  131940, 110879, 93028, 92090, 83871, 78867, 70273, 65300,
                  64589, 56594,
                  49035, 45228, 43094, 41543, 30528]
    populations = [63.8, 47, 9.55, 81.8, 5.42, 38.3, 61.1, 63.2, 21.3, 11.4,
                   7.35,
                   9.93, 10.7, 8.44, 10.6, 4.63, 3.28, 2.23, 4.38, 5.49, 1.34,
                   5.61,
                   16.8, 10.8]
    life_expectancies = [81.8, 82.1, 81.8, 80.7, 80.5, 76.4, 82.4, 80.5, 73.8,
                         80.8, 73.5,
                         74.6, 79.9, 81.1, 77.7, 80.7, 72.1, 72.2, 77, 75.4,
                         74.4, 79.4, 81, 80.5]
    data = {'extension': pd.Series(extensions, index=countries),
            'population': pd.Series(populations, index=countries),
            'life expectancy': pd.Series(life_expectancies, index=countries)}

    df = pd.DataFrame(data)
    print(df)
    df = df.sort_values('life expectancy')
    fig, axes = plt.subplots(nrows=3, ncols=1)
    for i, c in enumerate(df.columns):
        df[c].plot(kind='bar', ax=axes[i], figsize=(12, 10), title=c)
    plt.show()
Example #28
def test_params():
    #x = np.linspace(.8, 1.2, 100)
    x = np.linspace(-.2, .2, 100)

    num = 5
    range_a = np.linspace(1, 2, num)
    range_b = np.linspace(1., 1.1, num)
    range_p = np.linspace(.1, .4, num)
    range_q = np.linspace(.1, .4, num)
    range_T = np.linspace(30, 365, num) / 365

    args_def = {'a' : range_a.mean(), 'b' : range_b.mean(),
                'p' : range_p.mean(), 'q' : range_q.mean(),
                'T' : range_T.mean()}

    ranges = {'a' : range_a, 'b' : range_b,
              'p' : range_p, 'q' : range_q, 'T' : range_T}

    fig, axes = plt.subplots(nrows = len(ranges), figsize = (6,12))
    for name, a in zip(sorted(ranges.keys()), axes):
        args = args_def.copy()
        for pi in ranges[name]:
            args[name] = pi
            f = GB2(**args).density(x)
            a.plot(x, f, label = pi)
        a.legend(title = name)
    plt.show()
Example #29
def test_likelihood_evaluator3():
    
    tr = template.TemplateRenderCircleBorder()
    tr.set_params(14, 6, 4)

    t1 = tr.render(0, np.pi/2)
    img = np.zeros((240, 320), dtype=np.uint8)

    env = util.Environmentz((1.5, 2.0), (240, 320))
    
    le2 = likelihood.LikelihoodEvaluator3(env, tr)

    img[(120 - t1.shape[0] // 2):(120 + t1.shape[0] // 2),
        (160 - t1.shape[1] // 2):(160 + t1.shape[1] // 2)] += t1 * 255
    pylab.subplot(1, 2, 1)
    pylab.imshow(img, interpolation='nearest', cmap=pylab.cm.gray)

    state = np.zeros(1, dtype=util.DTYPE_STATE)

    xvals = np.linspace(0, 2.,  100)
    yvals = np.linspace(0, 1.5, 100)
    res = np.zeros((len(yvals), len(xvals)), dtype=np.float32)
    for yi, y in enumerate(yvals):
        for xi, x in enumerate(xvals):
            state[0]['x'] = x
            state[0]['y'] = y
            state[0]['theta'] = np.pi / 2. 
            res[yi, xi] =     le2.score_state(state, img)
    pylab.subplot(1, 2, 2)
    pylab.imshow(res)
    pylab.colorbar()
    pylab.show()
Example #30
def check_isometry(G, chart, nseeds=100, verbose = 0):
    """
    A simple check of the isometry:
    check whether the output distances match the input distances
    for nseeds points
    
    Returns
    -------
    a scaling factor between the proposed and the true metrics
    """
    nseeds = np.minimum(nseeds, G.V)
    aux = np.argsort(nr.rand(nseeds))
    seeds =  aux[:nseeds]
    dY = Euclidian_distance(chart[seeds],chart)
    dx = G.floyd(seeds)

    dY = np.reshape(dY,np.size(dY))
    dx = np.reshape(dx,np.size(dx))

    if verbose:
        import matplotlib.pylab as mp
        mp.figure()
        mp.plot(dx,dY,'.')
        mp.show()

    scale = np.dot(dx,dY)/np.dot(dx,dx)
    return scale
Example #31
def test_balde():
#    path = r'E:\Project\EIS\FEwork\Data\FASTV8_demo_a\demo_a\Test11_AD.ipt' 
    path = r'E:\Project\EIS\FEwork\Data\FASTV8_WP1.5\WP_Baseline\Test11_AD.ipt' 
    ad = ADipt(path)
    ad.plotfoils()
    plt.show()
Example #32
x_opt = optimize.fsolve(funcv, x_guess.copy())
print('optimal x:', x_opt)
print('optimal f(x):', funcv(x_opt))

# Make some pretty plots to show the function space as well
# as the solver starting point and the solution.

# Create 2D arrays x and y and evaluate them so that we
# can get the results for f0 and f1 in the system of equations.
x, y = mgrid[-100:100:.5, -100:100:.5]
f0, f1 = funcv((x, y))

# Set up a plot of f0 and f1 vs. x and y and show the
# starting and ending point of the solver on each plot.
pylab.figure(figsize=(14, 5))
pylab.subplot(1, 2, 1)
pylab.imshow(f0, extent=(-100, 100, -100, 100), cmap=pylab.cm.coolwarm)
# pylab.hold(True)  # hold() was removed from Matplotlib; overplotting is now the default
pylab.plot([x_guess[0]], [x_guess[1]], 'go')
pylab.plot([x_opt[0]], [x_opt[1]], 'ro')
pylab.colorbar()

pylab.subplot(1, 2, 2)
pylab.imshow(f1, extent=(-100, 100, -100, 100), cmap=pylab.cm.coolwarm)
# pylab.hold(True)  # hold() was removed from Matplotlib; overplotting is now the default
pylab.plot([x_guess[0]], [x_guess[1]], 'go')
pylab.plot([x_opt[0]], [x_opt[1]], 'ro')
pylab.colorbar()

pylab.show()
Example #33
def plot_learning_curve(estimator,
                        title,
                        X,
                        y,
                        ylim=None,
                        n_jobs=1,
                        train_sizes=[100, 300, 600, 1000, 1886],
                        cv=10,
                        verbose=0,
                        plot=True):
    """
    Plot the learning curve of the given estimator on the data.
    Parameters
    ----------
    estimator : the classifier you are using.
    title : title of the figure.
    X : input features, as a numpy array.
    y : input target vector.
    ylim : tuple (ymin, ymax) setting the lower and upper limits of the y axis.
    cv : number of folds the data is split into for cross-validation; one fold is
        used as the validation set and the remaining n-1 folds for training (10 here by default).
    n_jobs : number of parallel jobs (default 1).
    """
    train_sizes, train_scores, test_scores = learning_curve(
        estimator,
        X,
        y,
        cv=cv,
        n_jobs=n_jobs,
        train_sizes=train_sizes,
        verbose=verbose)

    train_scores_mean = np.mean(train_scores, axis=1)
    train_scores_std = np.std(train_scores, axis=1)
    test_scores_mean = np.mean(test_scores, axis=1)
    test_scores_std = np.std(test_scores, axis=1)

    if plot:
        plt.figure()
        plt.title(title)
        if ylim is not None:
            plt.ylim(*ylim)
        plt.xlabel(u"train_sample")
        plt.ylabel(u"score")
        plt.gca().invert_yaxis()
        plt.grid()

        plt.fill_between(train_sizes,
                         train_scores_mean - train_scores_std,
                         train_scores_mean + train_scores_std,
                         alpha=0.1,
                         color="b")
        plt.fill_between(train_sizes,
                         test_scores_mean - test_scores_std,
                         test_scores_mean + test_scores_std,
                         alpha=0.1,
                         color="r")
        plt.plot(train_sizes,
                 train_scores_mean,
                 'o-',
                 color="b",
                 label=u"train_score")
        plt.plot(train_sizes,
                 test_scores_mean,
                 'o-',
                 color="r",
                 label=u"cross_validation_score")

        plt.legend(loc="best")

        plt.draw()
        plt.gca().invert_yaxis()
        # save before show(); once the figure window is closed, savefig writes a blank image
        plt.savefig("learn_curve.jpg")
        plt.show()

    midpoint = ((train_scores_mean[-1] + train_scores_std[-1]) +
                (test_scores_mean[-1] - test_scores_std[-1])) / 2
    diff = (train_scores_mean[-1] + train_scores_std[-1]) - (
        test_scores_mean[-1] - test_scores_std[-1])
    return midpoint, diff
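A hedged usage sketch, assuming learning_curve here is sklearn.model_selection.learning_curve and that numpy (np) and matplotlib.pyplot (plt) are imported at module level:

from sklearn.datasets import load_digits
from sklearn.linear_model import LogisticRegression

X, y = load_digits(return_X_y=True)
# train_sizes must stay below the roughly 1400 samples available in each CV training split.
midpoint, diff = plot_learning_curve(LogisticRegression(max_iter=1000),
                                     u"Learning curve (digits)",
                                     X, y,
                                     cv=5,
                                     train_sizes=[200, 500, 800, 1100, 1400])
print(midpoint, diff)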
Example #34
if __name__ == "__main__":
    import pandas as pd
    data = pd.read_csv('../pycwt/sample/circ_hard.csv')
    bclass = Bioluminescence(data.x, data.y, period_guess=24.)

    bclass.detrend()
    bclass.continuous_wavelet_transform()

    import matplotlib.pylab as plt
    fig = plt.figure()
    ax = fig.add_subplot(111)

    bclass.plot_cwt(ax)

    plt.show()

# 
#     PlotOptions()
# 
#     from MelanopsinData import xn, yn
# 
#     est_period = estimate_period(xn, yn)
# 
#     x, y_even = even_resample(xn, yn, res=300)
# 
#     # Add additional noise
#     baseline = 0.5*(1 + np.sin(x*2*np.pi/160))
#     y_even += baseline
#     y_even += 0.1*np.random.rand(y_even.size)
# 
Example #35
exp = np.vectorize(lambda x: sympy.exp(sympy.S(x)))
x = np.arange(100)
mu = 50

ps = [0.1, 0.5, 0.9]
styles = ['r:', 'k-.', 'g--']
fs = [1 + (1-p)/p * mu for p in ps]
labels = ['NB({:.2f}, {:.1f})'.format(f,p) for f,p in zip(fs,ps)]

pl.plot(x, binom.pmf(x, 100, .5), 'b-', label='binom(100, 0.5)')

for i,p in enumerate(ps):
  log_probabilities = logNegBinom(x, fs[i], p)
  pl.plot(x, exp(log_probabilities), styles[i], label=labels[i])

pl.legend()
pl.savefig('negBinomDemo_1.png')
pl.show()

fs = [1, 10, 30, 50]
styles = ['b-', 'r:', 'k-.', 'g--']
labels = ['NB({:.1f}, 0.5)'.format(n) for n in fs]

for i,n in enumerate(fs):
  log_probabilities = logNegBinom(x, fs[i], 0.5)
  pl.plot(x, exp(log_probabilities), styles[i], label=labels[i])
pl.axis([0, 100, 0, 0.25])
pl.legend()
pl.savefig('negBinomDemo_2.png')
pl.show() 
Example #36
def onpick(event):
    ind = event.ind
    #print('onpick scatter event number:', ind)
    #print('Shown index', ind[0])
    #print('length of index', len(ind))
    #print('area of event', ds_child["area_um"][ind[0]])

    #plt.figure(figsize=(10,5))
    samples = ds.config["fluorescence"]["samples per event"]
    sample_rate = ds.config["fluorescence"]["sample rate"]
    t = np.arange(samples) / sample_rate * 1e6

    figure, axes = plt.subplots(nrows=5, sharex=False, sharey=False)
    axes[0] = plt.subplot2grid((5, 3), (0, 0), colspan=5)
    axes[1] = plt.subplot2grid((5, 3), (1, 0), colspan=5)
    axes[2] = plt.subplot2grid((5, 3), (2, 1))
    axes[3] = plt.subplot2grid((5, 3), (3, 1))
    axes[4] = plt.subplot2grid((5, 3), (4, 1))
    axes[0].imshow(ds_child["image"][ind[0]], cmap="gray")
    axes[1].imshow(ds_child["mask"][ind[0]])
    axes[2].plot(t,
                 ds_child["trace"]["fl1_median"][ind[0]],
                 color="#16A422",
                 label=ds.config["fluorescence"]["channel 1 name"])
    axes[3].plot(t,
                 ds_child["trace"]["fl2_median"][ind[0]],
                 color="#CE9720",
                 label=ds.config["fluorescence"]["channel 2 name"])
    axes[4].plot(t,
                 ds_child["trace"]["fl3_median"][ind[0]],
                 color="#CE2026",
                 label=ds.config["fluorescence"]["channel 3 name"])

    axes[2].set_xlim(0, 570)  #(200, 350)
    axes[2].grid()
    axes[3].set_xlim(0, 570)  #(200, 350)
    axes[3].grid()
    axes[4].set_xlim(0, 570)  #(200, 350)
    axes[4].grid()

    axes[2].axvline(ds_child["fl1_pos"][ind[0]] +
                    ds_child["fl1_width"][ind[0]] / 2,
                    color="gray")
    axes[2].axvline(ds_child["fl1_pos"][ind[0]] -
                    ds_child["fl1_width"][ind[0]] / 2,
                    color="gray")
    #axes[2].axvline(350, color="black")
    #axes[2].axvline(200, color="black")
    axes[3].axvline(ds_child["fl2_pos"][ind[0]] +
                    ds_child["fl2_width"][ind[0]] / 2,
                    color="gray")
    axes[3].axvline(ds_child["fl2_pos"][ind[0]] -
                    ds_child["fl2_width"][ind[0]] / 2,
                    color="gray")
    #axes[3].axvline(350, color="black")
    #axes[3].axvline(200, color="black")
    axes[4].axvline(ds_child["fl3_pos"][ind[0]] +
                    ds_child["fl3_width"][ind[0]] / 2,
                    color="gray")
    axes[4].axvline(ds_child["fl3_pos"][ind[0]] -
                    ds_child["fl3_width"][ind[0]] / 2,
                    color="gray")
    #axes[4].axvline(350, color="black")
    #axes[4].axvline(200, color="black")

    plt.show()
    print(ds_child["trace"][ind[0]])
Example #37
def PlotPolyDrivetrainMotions(drivetrain_params):
    vdrivetrain = VelocityDrivetrain(drivetrain_params)
    vl_plot = []
    vr_plot = []
    ul_plot = []
    ur_plot = []
    radius_plot = []
    t_plot = []
    left_gear_plot = []
    right_gear_plot = []
    vdrivetrain.left_shifter_position = 0.0
    vdrivetrain.right_shifter_position = 0.0
    vdrivetrain.left_gear = VelocityDrivetrain.LOW
    vdrivetrain.right_gear = VelocityDrivetrain.LOW

    glog.debug('K is %s', str(vdrivetrain.CurrentDrivetrain().K))

    if vdrivetrain.left_gear is VelocityDrivetrain.HIGH:
        glog.debug('Left is high')
    else:
        glog.debug('Left is low')
    if vdrivetrain.right_gear is VelocityDrivetrain.HIGH:
        glog.debug('Right is high')
    else:
        glog.debug('Right is low')

    for t in numpy.arange(0, 1.7, vdrivetrain.dt):
        if t < 0.5:
            vdrivetrain.Update(throttle=0.00, steering=1.0)
        elif t < 1.2:
            vdrivetrain.Update(throttle=0.5, steering=1.0)
        else:
            vdrivetrain.Update(throttle=0.00, steering=1.0)
        t_plot.append(t)
        vl_plot.append(vdrivetrain.X[0, 0])
        vr_plot.append(vdrivetrain.X[1, 0])
        ul_plot.append(vdrivetrain.U[0, 0])
        ur_plot.append(vdrivetrain.U[1, 0])
        left_gear_plot.append(
            (vdrivetrain.left_gear is VelocityDrivetrain.HIGH) * 2.0 - 10.0)
        right_gear_plot.append(
            (vdrivetrain.right_gear is VelocityDrivetrain.HIGH) * 2.0 - 10.0)

        fwd_velocity = (vdrivetrain.X[1, 0] + vdrivetrain.X[0, 0]) / 2
        turn_velocity = (vdrivetrain.X[1, 0] - vdrivetrain.X[0, 0])
        if abs(fwd_velocity) < 0.0000001:
            radius_plot.append(turn_velocity)
        else:
            radius_plot.append(turn_velocity / fwd_velocity)

    # TODO(austin):
    # Shifting compensation.

    # Tighten the turn.
    # Closed loop drive.

    pylab.plot(t_plot, vl_plot, label='left velocity')
    pylab.plot(t_plot, vr_plot, label='right velocity')
    pylab.plot(t_plot, ul_plot, label='left voltage')
    pylab.plot(t_plot, ur_plot, label='right voltage')
    pylab.plot(t_plot, radius_plot, label='radius')
    pylab.plot(t_plot, left_gear_plot, label='left gear high')
    pylab.plot(t_plot, right_gear_plot, label='right gear high')
    pylab.legend()
    pylab.show()
Example #38
bp = boxplot(I, positions=[8.8, 9.2], widths=0.38)
setBoxColors(bp, True)

# set axes limits and labels

ax.set_xticklabels(['', '1.', '2.', '3.', '4.', '5.', '6.', '7.', '8.', '9.'])
ax.set_xticks([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
plt.axvline(x=0.5, color="black", linewidth=0.5)
plt.axvline(x=1.5, color="black", linewidth=0.5)
plt.axvline(x=2.5, color="black", linewidth=0.5)
plt.axvline(x=3.5, color="black", linewidth=0.5)
plt.axvline(x=4.5, color="black", linewidth=0.5)
plt.axvline(x=5.5, color="black", linewidth=0.5)
plt.axvline(x=6.5, color="black", linewidth=0.5)
plt.axvline(x=7.5, color="black", linewidth=0.5)
plt.axvline(x=8.5, color="black", linewidth=0.5)

# draw temporary red and blue lines and use them to create a legend
#hB, = plot([1,1],'b-')
#hR, = plot([1,1],'r-')
#hB.set_visible(False)
#R.set_visible(False)
ax.legend([bp["boxes"][0], bp["boxes"][1]], ["PI", "OP_40"],
          loc="upper left",
          prop={'size': 8})
plt.ylabel("AUC")
plt.xlabel("iterace")
plt.tight_layout()
savefig('boxcompare4.png', dpi=1000)
show()
Example #39
else:
    def click(event):
        print([event.key])
        if event.key == 'm':
            mode = input('Enter new mode: ')
            for k in plots:
                try:
                    d = data_mode(plt_data[k], mode)
                    plots[k].set_data(d)
                except(ValueError):
                    print('Unrecognized plot mode')
            p.draw()
        elif event.key == 'd':
            max = input('Enter new max: ')
            try: max = float(max)
            except(ValueError): max = None
            drng = input('Enter new drng: ')
            try: drng = float(drng)
            except(ValueError): drng = None
            for k in plots:
                _max,_drng = max, drng
                if _max is None or _drng is None:
                    d = plots[k].get_array()
                    if _max is None: _max = d.max()
                    if _drng is None: _drng = _max - d.min()
                plots[k].set_clim(vmin=_max-_drng, vmax=_max)
            print('Replotting...')
            p.draw()
    p.connect('key_press_event', click)
    p.show()
Example #40
def testVisilibity():
    # Define an epsilon value (should be != 0.0)
    epsilon = 0.0000001

    # Define the points which will be the outer boundary of the environment
    # Must be COUNTER-CLOCK-WISE(ccw)
    p1 = vis.Point(0, 0)
    p2 = vis.Point(700, 0)
    p3 = vis.Point(700, 900)
    p4 = vis.Point(0, 900)

    # Load the values of the outer boundary polygon in order to draw it later
    wall_x = [p1.x(), p2.x(), p3.x(), p4.x(), p1.x()]
    wall_y = [p1.y(), p2.y(), p3.y(), p4.y(), p1.y()]

    # Outer boundary polygon must be COUNTER-CLOCK-WISE(ccw)
    # Create the outer boundary polygon
    walls = vis.Polygon([p1, p2, p3, p4])

    # Define the point of the "observer"
    observer = vis.Point(235, 400)

    # Uncomment the following line in order to create a cone polygon
    # walls = create_cone((observer.x(), observer.y()), 500, 270, 30, quality= 3)

    # Walls should be in standard form
    print('Walls in standard form : ', walls.is_in_standard_form())

    # Now we define some holes for our environment. The holes must be inside
    # our outer boundary polygon. A hole blocks the observer's vision; it acts
    # as an obstacle for its vision sensor.

    # We define some point for a hole. You can add more points in order to get
    # the shape you want.
    # The smallest point should come first
    p2 = vis.Point(100, 300)
    p3 = vis.Point(100, 500)
    p4 = vis.Point(150, 500)
    p1 = vis.Point(150, 300)

    # Load the values of the hole polygon in order to draw it later
    hole_x = [p1.x(), p2.x(), p3.x(), p4.x(), p1.x()]
    hole_y = [p1.y(), p2.y(), p3.y(), p4.y(), p1.y()]

    # Note: The point of a hole must be in CLOCK-WISE(cw) order.
    # Create the hole polygon
    hole = vis.Polygon([p2, p3, p4, p1])

    # Check if the hole is in standard form
    print('Hole in standard form: ', hole.is_in_standard_form())

    # Define another point of a hole polygon
    # Remember: the list of points must be CLOCK-WISE(cw)
    p1 = vis.Point(300, 300)
    p2 = vis.Point(300, 500)
    p3 = vis.Point(400, 550)
    p4 = vis.Point(400, 300)

    # Load the values of the hole polygon in order to draw it later
    hole1_x = [p1.x(), p2.x(), p3.x(), p4.x(), p1.x()]
    hole1_y = [p1.y(), p2.y(), p3.y(), p4.y(), p1.y()]

    # Create the hole polygon
    hole1 = vis.Polygon([p1, p2, p3, p4])

    # Check if the hole is in standard form
    print('Hole in standard form: ', hole1.is_in_standard_form())

    # Define another point of a hole polygon
    # Remember: the list of points must be CLOCK-WISE(cw)
    p2 = vis.Point(90, 700)
    p3 = vis.Point(250, 750)
    p4 = vis.Point(220, 600)
    p1 = vis.Point(150, 600)

    # Load the values of the hole polygon in order to draw it later
    hole2_x = [p1.x(), p2.x(), p3.x(), p4.x(), p1.x()]
    hole2_y = [p1.y(), p2.y(), p3.y(), p4.y(), p1.y()]

    # Create the hole polygon
    hole2 = vis.Polygon([p2, p3, p4, p1])

    # Check if the hole is in standard form
    print('Hole in standard form: ', hole2.is_in_standard_form())

    # Define another point of a hole polygon
    # Remember: the list of points must be CLOCK-WISE(cw)
    p1 = vis.Point(330, 700)
    p2 = vis.Point(330, 800)
    p3 = vis.Point(530, 850)
    p4 = vis.Point(530, 790)

    # Load the values of the hole polygon in order to draw it later
    hole3_x = [p1.x(), p2.x(), p3.x(), p4.x(), p1.x()]
    hole3_y = [p1.y(), p2.y(), p3.y(), p4.y(), p1.y()]

    # Create the hole polygon
    hole3 = vis.Polygon([p1, p2, p3, p4])

    # Check if the hole is in standard form
    print('Hole in standard form: ', hole3.is_in_standard_form())

    # Define another point of a hole polygon
    # Remember: the list of points must be CLOCK-WISE(cw)
    p1 = vis.Point(230, 50)
    p2 = vis.Point(250, 90)
    p3 = vis.Point(390, 90)
    p4 = vis.Point(390, 50)

    # Load the values of the hole polygon in order to draw it later
    hole4_x = [p1.x(), p2.x(), p3.x(), p4.x(), p1.x()]
    hole4_y = [p1.y(), p2.y(), p3.y(), p4.y(), p1.y()]

    # Create the hole polygon
    hole4 = vis.Polygon([p1, p2, p3, p4])

    # Check if the hole is in standard form
    print('Hole in standard form: ', hole4.is_in_standard_form())

    # Create the environment; walls will be the outer boundary because it
    # is the first polygon in the list. The other polygons will be holes
    env = vis.Environment([walls, hole, hole2, hole1, hole3, hole4])

    # Check if the environment is valid
    print('Environment is valid : ', env.is_valid(epsilon))

    # Define another point; it could be used to check whether the observer sees
    # it, to compute the shortest path from one point to the other, etc.
    end = vis.Point(330, 525)

    # Define another point that the 'observer' will see
    end_visible = vis.Point(415, 45)

    # Necessary to generate the visibility polygon
    observer.snap_to_boundary_of(env, epsilon)
    observer.snap_to_vertices_of(env, epsilon)

    # Obtain the visibility polygon of the 'observer' in the environment
    # defined previously
    isovist = vis.Visibility_Polygon(observer, env, epsilon)

    # Uncomment the following line to obtain the visibility polygon
    # of 'end' in the environment defined previously
    # polygon_vis = vis.Visibility_Polygon(end, env, epsilon)

    # Obtain the shortest paths from 'observer' to 'end' and to 'end_visible'
    # in the environment defined previously
    shortest_path = env.shortest_path(observer, end, epsilon)
    shortest_path1 = env.shortest_path(observer, end_visible, epsilon)

    # Print the length of the path
    print("Shortest Path length from observer to end: ", shortest_path.length())
    print("Shortest Path length from observer to end_visible: ", shortest_path1.length())

    # Check if 'observer' can see 'end', i.e., check if 'end' point is in
    # the visibility polygon of 'observer'
    print("Can observer see end? ", end._in(isovist, epsilon))

    print("Can observer see end_visible? ", end_visible._in(isovist, epsilon))

    # Print the point of the visibility polygon of 'observer' and save them
    # in two arrays in order to draw the polygon later
    point_x, point_y = save_print(isovist)

    # Add the first point again: the drawing function draws a line from each
    # point to the next, so closing the figure needs a final segment from the
    # last point back to the first one
    point_x.append(isovist[0].x())
    point_y.append(isovist[0].y())

    # Set the title
    p.title('VisiLibity Test')

    # Set the labels for the axis
    p.xlabel('X Position')
    p.ylabel('Y Position')

    # Plot the outer boundary with black color
    p.plot(wall_x, wall_y, 'black')

    # Plot the position of the observer with a green dot ('go')
    p.plot([observer.x()], [observer.y()], 'go')

    # Plot the position of 'end' with a green dot ('go')
    p.plot([end.x()], [end.y()], 'go')

    # Plot the position of 'end_visible' with a green dot ('go')
    p.plot([end_visible.x()], [end_visible.y()], 'go')

    # Plot the visibility polygon of 'observer'
    p.plot(point_x, point_y)

    # Plot the hole polygon with red color
    p.plot(hole_x, hole_y, 'r')

    # Plot the hole polygon with red color
    p.plot(hole1_x, hole1_y, 'r')

    # Plot the hole polygon with red color
    p.plot(hole2_x, hole2_y, 'r')

    # Plot the hole polygon with red color
    p.plot(hole3_x, hole3_y, 'r')

    # Plot the hole polygon with red color
    p.plot(hole4_x, hole4_y, 'r')

    # Example of a cone-shape polygon
    cone_point = vis.Point(440, 420)
    cone = create_cone([cone_point.x(), cone_point.y()], 150, 0, 45, 3)
    cone_x, cone_y = save_print(cone)
    cone_x.append(cone_x[0])
    cone_y.append(cone_y[0])
    p.plot([cone_point.x()], [cone_point.y()], 'go')
    p.plot(cone_x, cone_y)

    # Show the plot
    p.show()
Example #41
# -- main ----------------------------------------
if __name__ == '__main__':
  import matplotlib.pylab as plt;

  print " TEST1: intersection of two circles "
  AP = STEM_onaxis(0,0,0);
  intersecting_circles = AP._STEM_onaxis__intersecting_circles; ## private function
  plt.figure();
  plt.title("TEST1: Intersection of two circles with radius 1 and 2");
  d = np.arange(0,4,0.1);
  A = [intersecting_circles(1,2,_d) for _d in d];
  plt.plot(d,A);
  plt.axhline(np.pi, color='k');
  plt.xlabel("distance d between circles");
  plt.ylabel("Intersection area");

  print " TEST2: statistics for contributing momentum transfers"
  alpha=30; E=20; E0=60;
  fig=plt.figure(); ax=fig.add_subplot((111));
  plt.title("Contributions of different momentum transfers to STEM-EELS signal");
  q = np.arange(0.001,0.5,0.001)*conv.bohr; # [a.u.]
  for beta in (15,35,48,76):
    AP = STEM_onaxis(alpha,beta,E0);
    w  = AP.weight_q(q,E);
    ax.plot( q / conv.bohr, w, label="beta=%d"%beta );
  plt.xlabel("Momentum transfer q [1/A]");
  plt.ylabel("Weight in EELS signal");
  plt.legend(loc=0);
  plt.show();
Example #42
def perform_solvability_training(initializing,
                                 netname,
                                 numlayers=6,
                                 epochs=3,
                                 training_sets=2,
                                 batch_size=32,
                                 learning_rate=.001):
    """

    Parameters
    ----------
    initializing : boolean
        Is True if the net already exists and we want to continue
        training and False if we want to make a new net.
    netname : string
        The name of the network in the file system.
    numlayers: int, optional
        Number of layers to use in the network. The default is 6.
    epochs: int, optional
        Number of epochs to do per training set. The default is 3.
    training_sets: int, optional
        Number of training sets to sample from all possible data
        points. The default is 2.
    learning_rate: float, optional
        Learning rate of the Adam optimizer. Default is .001.

    Returns
    -------
    The trained model

    """

    # Set up training and test data.  Inputs are positions,
    # outputs are (x,y,direction) tuples encoded to integers
    # and then to one-hot vectors, representing
    # either a push or a win.
    x_test, y_test = utils.load_solvability_data(constants.TEST_LEVELS)

    # This line implicitly assumes that all levels have the same size.
    # Therefore, small levels are padded with unmovables.
    img_x, img_y, img_z = x_test[0].shape

    input_shape = (img_x, img_y, img_z)

    x_test = x_test.astype('float32')
    print(x_test.shape[0], 'test samples')

    dconst = 0.3  # Dropout between hidden layers

    model = None  # To give the variable global scope
    if initializing:
        # Create a convolutional network with numlayers layers of 3 by 3
        # convolutions and a dense layer at the end.
        # Use batch normalization and regularization.
        model = Sequential()
        model.add(BatchNormalization())
        model.add(
            Conv2D(
                64,
                (3, 3),
                activation='relu',
                input_shape=input_shape,
                #padding = 'same'))
                kernel_regularizer=regularizers.l2(.5),
                padding='same'))
        model.add(Dropout(dconst))

        for i in range(numlayers - 1):
            model.add(BatchNormalization())
            model.add(
                Conv2D(
                    64,
                    (3, 3),
                    activation='relu',
                    #padding = 'same'))
                    kernel_regularizer=regularizers.l2(.5),
                    padding='same'))
            model.add(Dropout(dconst))
        model.add(Flatten())
        model.add(Dense(1, activation='sigmoid'))
    else:
        # Load the model and its weights
        json_file = open("networks/policy_" + netname + ".json", "r")
        loaded_model_json = json_file.read()
        json_file.close()
        model = model_from_json(loaded_model_json)
        model.load_weights("networks/policy_" + netname + ".h5")
        print("Loaded model from disk")

    model.compile(loss=tensorflow.keras.losses.binary_crossentropy,
                  optimizer=tensorflow.keras.optimizers.Adam(
                      learning_rate=learning_rate),
                  metrics=['accuracy'])

    # Keep track of the model's accuracy
    class AccuracyHistory(tensorflow.keras.callbacks.Callback):
        def on_train_begin(self, logs={}):
            self.acc = []

        def on_epoch_end(self, batch, logs={}):
            self.acc.append(logs.get('acc'))

    history = AccuracyHistory()

    # Use different training datasets by getting different random
    # samples from the shifts of the input data
    for i in range(training_sets):
        print("training set", i)
        levels_to_train = constants.TRAIN_LEVELS
        x_train, y_train = utils.load_solvability_data(levels_to_train,
                                                       shifts=True)
        utils.shuffle_in_unison(x_train, y_train)
        x_train = x_train.astype('float32')

        # Train the network
        track = model.fit(x_train,
                          y_train,
                          batch_size=batch_size,
                          epochs=epochs,
                          verbose=1,
                          validation_data=(x_test, y_test),
                          callbacks=[history])

    score = model.evaluate(x_test, y_test, verbose=0)
    print('Test loss:', score[0])
    print('Test accuracy:', score[1])
    plt.plot(range(1, epochs + 1), track.history['val_accuracy'])
    plt.plot(range(1, epochs + 1), track.history['accuracy'])
    plt.xlabel('Epochs')
    plt.ylabel('Accuracy')
    plt.show()

    # Save the trained network
    model_json = model.to_json()
    directory = os.getcwd() + '/networks'
    if not os.path.exists(directory):
        os.mkdir(directory)
    with open("networks/solvability_" + netname + ".json", "w") as json_file:
        json_file.write(model_json)

    model.save_weights("networks/solvability_" + netname + ".h5")
    print("Saved model to disk")

    return model
Example #43
    def fit(self, Xtrain, Ytrain, Xtest, Ytest, epoch=50, learning_rate=0.001, batchsz=100):
        N, D = Xtrain.shape
        M1 = 1000
        M2 = 500

        tf_X = tf.placeholder(dtype=tf.float32)
        tf_Y = tf.placeholder(dtype=tf.float32)

        tf_W1 = tf.Variable(dtype=tf.float32,
                            initial_value=tf.random.normal(shape=(D, M1), mean=0, stddev=tf.math.sqrt(1 / D)))
        tf_b1 = tf.Variable(dtype=tf.float32, initial_value=np.zeros(shape=(M1)))

        tf_W2 = tf.Variable(dtype=tf.float32,
                            initial_value=tf.random.normal(shape=(M1, M2), mean=0, stddev=tf.math.sqrt(1 / M1)))
        tf_b2 = tf.Variable(dtype=tf.float32, initial_value=np.zeros(shape=(M2)))

        tf_W3 = tf.Variable(dtype=tf.float32,
                            initial_value=tf.random.normal(shape=(M2, self.NUM_CLASSES), mean=0,
                                                           stddev=tf.math.sqrt(1 / M2)))
        tf_b3 = tf.Variable(dtype=tf.float32, initial_value=np.zeros(shape=(self.NUM_CLASSES)))

        tf_Z1 = tf.nn.relu(tf.matmul(tf_X, tf_W1) + tf_b1)
        tf_Z2 = tf.nn.relu(tf.matmul(tf_Z1, tf_W2) + tf_b2)
        tf_Yhat = tf.nn.softmax(tf.matmul(tf_Z2, tf_W3) + tf_b3)

        tf_cost = tf.reduce_sum(-1 * tf_Y * tf.math.log(tf_Yhat))
        tf_train = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(
            tf_cost)

        training_accuracies = []
        test_accuracies = []
        epoches = []

        with tf.Session() as session:
            session.run(tf.global_variables_initializer())

            nBatches = int(np.ceil(N / batchsz))

            for i in range(epoch):
                epoches.append(i)
                # Xtrain, Ytrain = sklearn.utils.shuffle(Xtrain, Ytrain)

                for j in range(nBatches):
                    lower = j * batchsz
                    upper = np.min([(j + 1) * batchsz, N])

                    session.run(tf_train,
                                feed_dict={tf_X: Xtrain[lower:upper], tf_Y: Ytrain[lower: upper]})

                test_error, Yhat = session.run([tf_cost, tf_Yhat], feed_dict={tf_X: Xtest, tf_Y: Ytest})
                test_accuracy = self.score(Ytest, Yhat)
                test_accuracies.append(test_accuracy)

                Yhat = session.run(tf_Yhat, feed_dict={tf_X: Xtrain, tf_Y: Ytrain})
                training_accuracy = self.score(Ytrain, Yhat)
                training_accuracies.append(training_accuracy)

                print('Epoch ' + str(i) + ' / test error = ' + str(test_error / Xtest.shape[0])
                      + ' / training_accuracy = ' + str(training_accuracy)
                      + ' / test_accuracy = ' + str(test_accuracy))

        # plot
        print(training_accuracies)
        print(test_accuracies)

        plt.plot(epoches, training_accuracies, label='training accuracy')
        plt.plot(epoches, test_accuracies, label='test accuracy')
        plt.xlabel('epoch')
        plt.ylabel('accuracy')
        plt.grid(True)
        plt.title('Accuracy')
        plt.legend()
        plt.show()
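
    # Hedged sketch (assumption; self.score() is referenced above but not shown
    # in this snippet): accuracy computed by comparing the argmax of the one-hot
    # targets against the argmax of the softmax outputs.
    def score(self, Y, Yhat):
        return np.mean(np.argmax(Y, axis=1) == np.argmax(Yhat, axis=1))
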
Ejemplo n.º 44
0
    be_long_count = valData['beLong'].value_counts()
    print(be_long_count)
    print("out of ", nrows)

    valData = valData.drop(
        ['Open', 'High', 'Low', 'Close', 'Symbol', 'percReturn'], axis=1)

    if pleasePlot:
        plotTitle = issue + ", " + str(modelStartDate) + " to " + str(
            modelEndDate)
        plotIt.plot_v2x(valData['Pri'], valData['beLong'], plotTitle)
        plotIt.histogram(valData['beLong'],
                         x_label="beLong signal",
                         y_label="Frequency",
                         title="beLong distribution for " + issue)
        plt.show(block=False)

    valModelData = valData.drop(['Pri', 'beLong', 'gainAhead'], axis=1)

    valRows = valModelData.shape[0]
    print("There are %i data points" % valRows)

    # test the validation data
    y_validate = []
    y_validate = model.predict(valModelData)

    # Create best estimate of trades
    bestEstimate = np.zeros(valRows)

    # You may need to adjust for the first and / or final entry
    for i in range(valRows - 1):
Ejemplo n.º 45
0
def draw(graph,
         title=None,
         layout=None,
         filename=None,
         return_ax=False,
         pos=None,
         font_size=9,
         alpha=1.0,
         label_shift=(0, 0),
         truncate_labels=10):
    """Graph drawing made a bit easier
    
    Parameters:
        :graph (Graph): input graph, has to be generated via kegg_link_graph()
        :layout (str): layout type, choose from 'bipartite_layout',\
        'circular_layout','kamada_kawai_layout','random_layout',\ 'shell_layout',\
        'spring_layout','spectral_layout'
        :filename (str): if a filename is selected saves the plot as filename.png
        :title (str): title for the graph
        :return_ax: if True returns ax for plot
        
    Returns:
        :ax (list): optional ax for the plot


        """
    default_layout = "spring_layout"
    if layout is None:
        layout = default_layout

    node_groups = {}

    graph_nodetypes = get_unique_nodetypes(graph)

    base_colors = list(mplcolors.BASE_COLORS.keys())

    for i, nodetype in enumerate(graph_nodetypes):
        node_group = (get_nodes_by_nodetype(graph, nodetype,
                                            return_dict=True).keys())
        node_groups.update({nodetype: (node_group, base_colors[i])})

    if title is None:
        if len(graph_nodetypes) == 1:
            title = "{} graph".format(graph_nodetypes[0])
        elif len(graph_nodetypes) == 2:
            title = "{} > {} graph".format(graph_nodetypes[1],
                                           graph_nodetypes[0])
        else:
            title = "Graph plot"

    layouts = {
        "circular_layout": nx.circular_layout,
        "kamada_kawai_layout": nx.kamada_kawai_layout,
        "random_layout": nx.random_layout,
        "shell_layout": nx.shell_layout,
        "spring_layout": nx.spring_layout,
        "spectral_layout": nx.spectral_layout,
    }

    if layout not in layouts:
        logging.warning("layout {} not valid: falling back to the default "
                        "{} layout".format(layout, default_layout))
        layout = default_layout

    plt.figure()

    if pos is None:
        output_layout = layouts[layout](graph)
        pos = {}
        for key, value in output_layout.items():
            pos[key] = tuple(value)

    for nodetype, node_group in node_groups.items():
        nx.draw_networkx(graph,
                         nodelist=node_group[0],
                         pos=pos,
                         node_color=node_group[1],
                         with_labels=False,
                         label=nodetype)

    nx.draw_networkx_edges(graph, pos)
    pos_labels = shift_pos(pos, label_shift)

    candidate_labels = nx.get_node_attributes(graph, "label")

    if candidate_labels:
        if truncate_labels:
            labels = shorten_labels(candidate_labels, truncate_labels)
        else:
            labels = candidate_labels
    else:
        #        labels = None
        nodelist = list(graph.nodes)
        labels = dict(zip(nodelist, nodelist))

    nx.draw_networkx_labels(graph,
                            pos_labels,
                            labels=labels,
                            font_size=font_size,
                            alpha=alpha)

    plt.legend()
    if title is not None:
        plt.title(title)

    plt.axis("off")

    if filename is not None:
        plt.savefig("{}.png".format(filename))

    plt.show()

    if return_ax:
        ax = plt.gca()

        return ax
Ejemplo n.º 46
0
def ensemble_plot_all_roc_curve(fprs, tprs, model_name):
    """Plot the Receiver Operating Characteristic from a list
    of true positive rates and false positive rates."""

    # Initialize useful lists + the plot axes.
    tprs_interp = []
    aucs = []
    mean_fpr = np.linspace(0, 1, 100)
    f, ax = plt.subplots(figsize=(7, 7))  # (14, 10)

    # Plot ROC for each K-Fold + compute AUC scores.
    for i, (fpr, tpr) in enumerate(zip(fprs, tprs)):
        tprs_interp.append(np.interp(mean_fpr, fpr, tpr))
        tprs_interp[-1][0] = 0.0
        roc_auc = auc(fpr, tpr)
        aucs.append(roc_auc)
        ax.plot(fpr,
                tpr,
                lw=1,
                alpha=0.5,
                label='ROC fold %d (AUC = %0.2f)' % (i, roc_auc))

    # Plot the luck line.
    plt.plot([0, 1], [0, 1],
             linestyle='--',
             lw=2,
             color='r',
             label='Random Case',
             alpha=.8)

    # Plot the mean ROC.
    mean_tpr = np.mean(tprs_interp, axis=0)
    mean_tpr[-1] = 1.0
    mean_auc = auc(mean_fpr, mean_tpr)
    std_auc = np.std(aucs)
    ax.plot(mean_fpr,
            mean_tpr,
            color='green',
            label=r'Mean ROC (AUC = %0.2f $\pm$ %0.2f)' % (mean_auc, std_auc),
            lw=2,
            alpha=.9)

    # Plot the standard deviation around the mean ROC.
    std_tpr = np.std(tprs_interp, axis=0)
    tprs_upper = np.minimum(mean_tpr + std_tpr, 1)
    tprs_lower = np.maximum(mean_tpr - std_tpr, 0)
    ax.fill_between(mean_fpr,
                    tprs_lower,
                    tprs_upper,
                    color='grey',
                    alpha=.3,
                    label=r'$\pm$ 1 std. dev.')

    # Fine tune and show the plot.
    ax.set_xlim([-0.05, 1.05])
    ax.set_ylim([-0.05, 1.05])
    ax.set_xlabel('False Positive Rate')
    ax.set_ylabel('True Positive Rate')
    ax.set_title('Receiver Operating Characteristic: ' + r"$\bf{" +
                 model_name + "}$")
    ax.legend(loc="lower right")
    plt.show()
    return f, ax
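
# Hedged usage sketch (not in the original example): collecting per-fold ROC
# curves with scikit-learn and handing them to the helper above.  The dataset,
# classifier and fold count are placeholders.
def example_ensemble_roc():
    from sklearn.datasets import make_classification
    from sklearn.linear_model import LogisticRegression
    from sklearn.metrics import roc_curve
    from sklearn.model_selection import StratifiedKFold

    X, y = make_classification(n_samples=500, n_features=10, random_state=0)
    fprs, tprs = [], []
    for train_idx, test_idx in StratifiedKFold(n_splits=5).split(X, y):
        clf = LogisticRegression(max_iter=1000).fit(X[train_idx], y[train_idx])
        fpr, tpr, _ = roc_curve(y[test_idx], clf.predict_proba(X[test_idx])[:, 1])
        fprs.append(fpr)
        tprs.append(tpr)
    return ensemble_plot_all_roc_curve(fprs, tprs, "LogisticRegression")
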
Ejemplo n.º 47
0
def main():
    
    f = open("PDBbind2015_refined-core.dat")
    line = f.readline()

    x = []
    y = []

    datalist = []

    while 1:

        line = f.readline().strip("\n").split(" ")

        if line == ['']:
            break
        y.append(float(line[0]))
        x.append(float(line[1]))
        
        datalist.append([float(line[2]), float(line[3]), float(line[4]), float(line[5]), float(line[6])])


    data = np.array(datalist)
    #sdata = preprocessing.scale(data)
    data = preprocessing.normalize(data)

    X = data
    Y = np.array(y)

    x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=0.2, random_state=4)

    # Fit regression models

    # Linear Regression
    m_lr = linear_model.LinearRegression()
    kfold = KFold(n_splits=10, shuffle=True, random_state=21)  # shuffle=True is required when random_state is set
    cv_result = cross_val_score(m_lr, x_train, y_train, cv=kfold, scoring='neg_mean_squared_error')
    print('Linear Regression CVE = ',cv_result.mean(),',',cv_result.std())


    # Ridge Regression
    m_rg = linear_model.Ridge()
    kfold = KFold(n_splits=10, shuffle=True, random_state=21)
    cv_result = cross_val_score(m_rg, x_train, y_train, cv=kfold, scoring='neg_mean_squared_error')
    print('Ridge Regression CVE = ',cv_result.mean(),',',cv_result.std())
    
    # Lasso 
    m_lasso = linear_model.Lasso()
    kfold = KFold(n_splits=10, shuffle=True, random_state=21)
    cv_result = cross_val_score(m_lasso, x_train, y_train, cv=kfold, scoring='neg_mean_squared_error')
    print('Lasso Regression CVE = ',cv_result.mean(),',',cv_result.std())   

    # Elastic Net
    m_net = linear_model.ElasticNet()
    kfold = KFold(n_splits=10, shuffle=True, random_state=21)
    cv_result = cross_val_score(m_net, x_train, y_train, cv=kfold, scoring='neg_mean_squared_error')
    print('Elastic Net CVE = ',cv_result.mean(),',',cv_result.std())

    # Polynomial Fit
    svr_poly = SVR(kernel='poly'  , C=1e3, degree=2)
    kfold = KFold(n_splits=10, shuffle=True, random_state=21)
    cv_result = cross_val_score(svr_poly, x_train, y_train, cv=kfold, scoring='neg_mean_squared_error')
    print('Poly CVE = ',cv_result.mean(),',',cv_result.std())


    # Exponential Fit
    svr_rbf  = SVR(kernel='rbf'   , C=1e3, gamma=0.1)
    kfold = KFold(n_splits=10, shuffle=True, random_state=21)
    cv_result = cross_val_score(svr_rbf, x_train, y_train, cv=kfold, scoring='neg_mean_squared_error')
    print('RBF CVE = ',cv_result.mean(),',',cv_result.std())

    #for correlation plot
    x_train = pd.DataFrame(x_train)
    corr_df = x_train.corr(method='pearson')

    mask = np.zeros_like(corr_df)
    mask[np.triu_indices_from(mask)] = True

    seaborn.heatmap(corr_df, cmap='RdYlGn_r', vmax=1.0, vmin=-1.0, mask=mask, linewidths=2.5)
    plt.yticks(rotation=0)
    plt.xticks(rotation=90)
    plt.show()
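
# Hedged helper sketch (not part of the original example): cross_val_score with
# scoring='neg_mean_squared_error' returns negated MSE values, so flipping the
# sign and taking the square root gives a positive per-fold RMSE that is easier
# to compare across models, e.g. report_rmse('RBF', cv_result).
def report_rmse(label, cv_result):
    rmse = np.sqrt(-cv_result)
    print(label, 'RMSE =', rmse.mean(), ',', rmse.std())
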
Ejemplo n.º 48
0
def imshow(im):
    # im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
    # Image.fromarray(im).show()
    plt.figure()
    plt.imshow(im, cmap=plt.cm.gray)
    plt.show()
Ejemplo n.º 49
0
import matplotlib.pylab as pyl
import numpy as np

# Subplots

# First subplot
pyl.subplot(2, 2, 1)  # (rows, columns, current panel)
x = [1, 2, 3, 4, 5]
y = [1, 2, 3, 4, 5]
pyl.plot(x, y)

# Second subplot
pyl.subplot(2, 2, 2)

# Third subplot (spans the bottom row)
pyl.subplot(2, 1, 2)

pyl.show()
Ejemplo n.º 50
0
def add_calibration(data, imagestretch='linear'):
    """
    add calibration results to website
    """

    ### produce calibration plot for each frame
    for idx, cat in enumerate(data['catalogs']):
        if not data['zeropoints'][idx]['success']:
            continue
        ax1 = plt.subplot(211)
        ax1.set_title('%s: %s-band from %s' % 
                      (cat.catalogname, data['filtername'], 
                       data['ref_cat'].catalogname))
        ax1.set_xlabel('Number of Reference Stars')
        ax1.set_ylabel('Magnitude Zeropoint', fontdict={'color':'red'})
        #ax1.ticklabel_format(style='sci', axis='y', scilimits=(-5,5))

        zp_idx = data['zeropoints'][idx]['zp_idx']
        clipping_steps = data['zeropoints'][idx]['clipping_steps'] 
        
        x = [len(clipping_steps[i][3]) for i in range(len(clipping_steps))]

        ax1.errorbar(x, [clipping_steps[i][0] for i
                         in range(len(clipping_steps))],
                     yerr=[clipping_steps[i][1] for i
                           in range(len(clipping_steps))], color='red')
        ax1.set_ylim(ax1.get_ylim()[::-1]) # reverse y axis
        ax1.plot([len(clipping_steps[zp_idx][3]), 
                  len(clipping_steps[zp_idx][3])],
                 ax1.get_ylim(), color='black') 

        ax2 = ax1.twinx()
        ax2.plot(x, [clipping_steps[i][2] for i
                     in range(len(clipping_steps))],
                 color='blue')
        ax2.set_ylabel(r'reduced $\chi^2$', fontdict={'color':'blue'})
        ax2.set_yscale('log')
            
        # residual plot
        ax3 = plt.subplot(212)
        ax3.set_xlabel('Reference Star Magnitude')
        ax3.set_ylabel('Calibration-Reference (mag)')
            
        match = data['zeropoints'][idx]['match']
        x             = match[0][0][clipping_steps[zp_idx][3]]
        residuals     = match[1][0][clipping_steps[zp_idx][3]] \
                        + clipping_steps[zp_idx][0] \
                        - match[0][0][clipping_steps[zp_idx][3]] 
        residuals_sig = numpy.sqrt(match[1][1][clipping_steps[zp_idx][3]]**2\
                                   + clipping_steps[zp_idx][1]**2)

        ax3.errorbar(x, residuals, yerr=residuals_sig, color='black',
                     linestyle='')
        ax3.plot(ax3.get_xlim(), [0,0], color='black', linestyle='--')
        ax3.set_ylim(ax3.get_ylim()[::-1]) # reverse y axis  

        plt.grid()
        plt.savefig(('.diagnostics/%s_photcal.png') % cat.catalogname,
                    format='png')
        data['zeropoints'][idx]['plotfilename'] = \
                                        ('.diagnostics/%s_photcal.png') % \
                                        cat.catalogname
        plt.close()

            
    ### create zeropoint overview plot
    times = [dat['obstime'][0] for dat in data['zeropoints']]
    zp    = [dat['zp'] for dat in data['zeropoints']]
    zperr = [dat['zp_sig'] for dat in data['zeropoints']]

    plt.subplot()
    plt.errorbar(times, zp, yerr=zperr, linestyle='')
    plt.xlabel('Observation Midtime (JD)')
    plt.ylabel('Magnitude Zeropoints (mag)')
    plt.show()
    plt.ylim([plt.ylim()[1], plt.ylim()[0]])
    plt.grid()
    plt.savefig('.diagnostics/zeropoints.png', format='png')
    plt.close()
    data['zpplot'] = 'zeropoints.png'


    ### create calibration website
    html  = "<H2>Calibration Results</H2>\n"
    html += ("<P>Calibration input: minimum number/fraction of reference " \
             + "stars %.2f, reference catalog: %s, filter name: %s\n") % \
        (data['minstars'], data['ref_cat'].catalogname, data['filtername'])
    html += "<TABLE BORDER=\"1\">\n<TR>\n"
    html += "<TH>Filename</TH><TH>Zeropoint (mag)</TH><TH>ZP_sigma (mag)</TH>" \
            + "<TH>N_stars</TH><TH>N_matched</TH>\n</TR>\n"
    for dat in data['zeropoints']:
        if 'plotfilename' in list(dat.keys()):
            html += ("<TR><TD><A HREF=\"#%s\">%s</A></TD>" \
                     + "<TD>%7.4f</TD><TD>%7.4f</TD><TD>%d</TD>" \
                     + "<TD>%d</TD>\n</TR>" ) % \
                (dat['plotfilename'].split('.diagnostics/')[1], 
                 dat['filename'], dat['zp'],
                 dat['zp_sig'], dat['zp_nstars'],
                 len(dat['match'][0][0]))
    html += "</TABLE>\n"
    html += "<P><IMG SRC=\"%s\">" % data['zpplot']
    for dat in data['zeropoints']:
        if not dat['success']:
            continue
        catframe = '.diagnostics/'+ \
                   dat['filename'][:dat['filename'].find('.ldac')] + \
                   '.fits_reference_stars.png'
        html += ("<H3>%s</H3>" \
                 + "<TABLE BORDER=\"0\">\n" \
                 + "<TR><TD><A HREF=\"%s\">" \
                 + "<IMG ID=\"%s\" SRC=\"%s\" HEIGHT=300 WIDTH=400>" \
                 + "</A></TD><TD><A HREF=\"%s\">" \
                 + "<IMG ID=\"%s\" SRC=\"%s\" HEIGHT=400 WIDTH=400>" \
                 + "</A></TD>\n") % \
                (dat['filename'],
                 dat['plotfilename'].split('.diagnostics/')[1], 
                 dat['plotfilename'].split('.diagnostics/')[1],
                 dat['plotfilename'].split('.diagnostics/')[1], 
                 catframe.split('.diagnostics/')[1], 
                 catframe.split('.diagnostics/')[1], 
                 catframe.split('.diagnostics/')[1])
        html += "<TD><TABLE BORDER=\"1\">\n<TR>\n"
        html += "<TH>Idx</TH><TH>Name</TH><TH>RA</TH><TH>Dec</TH>" \
                + "<TH>Catalog (mag)</TH>" \
                + "<TH>Instrumental (mag)</TH><TH>Calibrated (mag)</TH>" \
                + "<TH>Residual (mag</TH>\n</TR>\n"
        for i, idx in enumerate(dat['zp_usedstars']):
            name = dat['match'][0][2][idx]
            if isinstance(name, bytes):
                name = name.decode('utf8')
            html += ("<TR><TD>%d</TD><TD>%s</TD><TD>%12.8f</TD>" \
                     + "<TD>%12.8f</TD><TD>%.3f+-%.3f</TD>" \
                     + "<TD>%.3f+-%.3f</TD>" \
                     + "<TD>%.3f+-%.3f</TD><TD>%.3f</TD></TR>") % \
                (i+1, name,
                 dat['match'][0][3][idx],
                 dat['match'][0][4][idx], dat['match'][0][0][idx], 
                 dat['match'][0][1][idx],
                 dat['match'][1][0][idx], dat['match'][1][1][idx],
                 dat['zp']+dat['match'][1][0][idx], 
                 numpy.sqrt(dat['zp_sig']**2 + dat['match'][1][1][idx]**2),
                 (dat['zp']+dat['match'][1][0][idx])-dat['match'][0][0][idx])
        html += "</TABLE><P>derived zeropoint: %7.4f+-%6.4f mag\n" % \
                (dat['zp'], dat['zp_sig'])
        html += "</TR></TD></TR></TABLE>\n"

        ### create catalog frame
        fits_filename = dat['filename'][:dat['filename'].find('.ldac')] + \
                        '.fits'
        imgdat = fits.open(fits_filename, ignore_missing_end=True)[0].data
        resize_factor = min(1., 1000./numpy.max(imgdat.shape))
        # clip extreme values to prevent crash of imresize
        imgdat = numpy.clip(imgdat, numpy.percentile(imgdat, 1),
                            numpy.percentile(imgdat, 99))
        imgdat = imresize(imgdat, resize_factor, interp='nearest')
        header = fits.open(fits_filename, ignore_missing_end=True)[0].header

        norm = ImageNormalize(imgdat, interval=ZScaleInterval(),
                      stretch={'linear': LinearStretch(),
                               'log': LogStretch()}[imagestretch])
        
        # turn relevant header keys into floats
        # astropy.io.fits bug
        for key, val in list(header.items()):
            if 'CD1_' in key or 'CD2_' in key or \
               'CRVAL' in key or 'CRPIX' in key or \
               'EQUINOX' in key:
                header[key] = float(val)
                
        plt.figure(figsize=(5, 5))
        img = plt.imshow(imgdat, cmap='gray', norm=norm,
                         origin='lower')

        # remove axes
        plt.axis('off')
        img.axes.get_xaxis().set_visible(False)
        img.axes.get_yaxis().set_visible(False)

        # plot reference sources
        if len(dat['match'][0][3]) > 0 and len(dat['match'][0][4]) > 0:
            try:
                w = wcs.WCS(header)
                world_coo = [[dat['match'][0][3][idx],
                              dat['match'][0][4][idx]] \
                             for idx in dat['zp_usedstars']]
                img_coo = w.wcs_world2pix(world_coo, True )

                plt.scatter([c[0]*resize_factor for c in img_coo],
                            [c[1]*resize_factor for c in img_coo], 
                            s=10, marker='o', edgecolors='red', linewidth=0.1,
                            facecolor='none')
                for i in range(len(dat['zp_usedstars'])):
                    plt.annotate(str(i+1), xy=((img_coo[i][0]*resize_factor)+15,
                                               img_coo[i][1]*resize_factor), 
                                 color='red', horizontalalignment='left',
                                 verticalalignment='center')
            except astropy.wcs._wcs.InvalidTransformError:
                logging.error('could not plot reference sources due to '
                              'astropy.wcs._wcs.InvalidTransformError; '
                              'most likely unknown distortion parameters.')

                
        plt.savefig(catframe, format='png', bbox_inches='tight', 
                    pad_inches=0, dpi=200)
        plt.close()

    create_website(_pp_conf.cal_filename, content=html)

    ### update index.html 
    html  = "<H2>Photometric Calibration - Zeropoints</H2>\n"
    html += "match image data with %s (%s);\n" % \
            (data['ref_cat'].catalogname, data['ref_cat'].history)
    html += "see <A HREF=\"%s\">calibration</A> website for details\n" % \
            _pp_conf.cal_filename
    html += "<P><IMG SRC=\"%s\">\n" % ('.diagnostics/' + data['zpplot'])

    append_website(_pp_conf.index_filename, html,
                   replace_below=("<H2>Photometric Calibration "
                                  "- Zeropoints</H2>\n"))

    return None
Ejemplo n.º 51
0
def plti(im,**kwargs):
    plt.imshow(im,interpolation='none',**kwargs)
    plt.axis('off')
    plt.savefig('d:/new.jpeg')
    plt.show()
def GraphicInterface(category, num):

    s_date = '10/1/2008'
    e_date = '11/15/2018'

    # Retrieve Auto industry: 'GM', 'F', 'TM', 'TSLA', 'HMC'
    gm = web.DataReader('GM', data_source='yahoo', start=s_date, end=e_date)
    f = web.DataReader('F', data_source='yahoo', start=s_date, end=e_date)
    tm = web.DataReader('TM', data_source='yahoo', start=s_date, end=e_date)
    tsla = web.DataReader('TSLA', data_source='yahoo', start=s_date, end=e_date)
    hmc = web.DataReader('HMC', data_source='yahoo', start=s_date, end=e_date)

    # Retrieve Bank industry: 'JPM', 'BAC', 'HSBC', 'C' (Citigroup), 'GS' (Goldman Sachs)
    jpm = web.DataReader('JPM', data_source='yahoo', start=s_date, end=e_date)
    bac = web.DataReader('BAC', data_source='yahoo', start=s_date, end=e_date)
    hsbc = web.DataReader('HSBC', data_source='yahoo', start=s_date, end=e_date)
    c = web.DataReader('C', data_source='yahoo', start=s_date, end=e_date)
    gs = web.DataReader('GS', data_source='yahoo', start=s_date, end=e_date)

    # Retrieve Retail industry: 'WMT', 'TGT', 'JCP', 'HD', 'COST'
    wmt = web.DataReader('WMT', data_source='yahoo', start=s_date, end=e_date)
    tgt = web.DataReader('TGT', data_source='yahoo', start=s_date, end=e_date)
    jcp = web.DataReader('JCP', data_source='yahoo', start=s_date, end=e_date)
    hd = web.DataReader('HD', data_source='yahoo', start=s_date, end=e_date)
    cost = web.DataReader('COST', data_source='yahoo', start=s_date, end=e_date)

    # Retrieve IT industry: 'AAPL','MSFT','AMZN','GOOG','FB','intc'
    aapl = web.DataReader('AAPL', data_source='yahoo', start=s_date, end=e_date)
    msft = web.DataReader('MSFT', data_source='yahoo', start=s_date, end=e_date)
    amzn = web.DataReader('AMZN', data_source='yahoo', start=s_date, end=e_date)
    goog = web.DataReader('GOOG', data_source='yahoo', start=s_date, end=e_date)
    fb = web.DataReader('FB', data_source='yahoo', start=s_date, end=e_date)
    intc = web.DataReader('INTC', data_source='yahoo', start=s_date, end=e_date)

    # Retrieve Fashion industry: 'TPR', 'HM-B.ST', 'GES', 'MC', 'TIF'
    tpr = web.DataReader('TPR', data_source='yahoo', start=s_date, end=e_date)
    hmb = web.DataReader('HM-B.ST', data_source='yahoo', start=s_date, end=e_date)
    ges = web.DataReader('GES', data_source='yahoo', start=s_date, end=e_date)
    mc = web.DataReader('MC', data_source='yahoo', start=s_date, end=e_date)
    tif = web.DataReader('TIF', data_source='yahoo', start=s_date, end=e_date)

    AUTO_name = ['GM', 'F', 'TM', 'TSLA', 'HMC']
    BANK_name = ['JPM', 'BAC', 'HSBC', 'C', 'GS']
    RETAIL_name = ['WMT', 'TGT', 'JCP', 'HD', 'COST']
    IT_name = ['AAPL', 'MSFT', 'AMZN', 'GOOG', 'INTC']
    FASHION_name = ['TPR', 'HM-B.ST', 'GES', 'MC', 'TIF']

    AUTO = [gm, f, tm, tsla, hmc]
    BANK = [jpm, bac, hsbc, c, gs]
    RETAIL = [wmt, tgt, jcp, hd, cost]
    IT = [aapl, msft, amzn, goog, intc]
    FASHION = [tpr, hmb, ges, mc, tif]
    
    stocks = []
    stocks_name = []

    # Map each category letter to its (prices, ticker) lists and shuffle the
    # pairs together so the plotted series and the legend labels stay aligned.
    groups = {'A': (AUTO, AUTO_name), 'B': (BANK, BANK_name),
              'I': (IT, IT_name), 'R': (RETAIL, RETAIL_name),
              'F': (FASHION, FASHION_name)}

    for i in category:
        if str(i) not in groups:
            continue
        paired = list(zip(*groups[str(i)]))
        random.shuffle(paired)
        for d in range(num):
            stocks.append(paired[d][0])
            stocks_name.append(paired[d][1])
    
    # colors = ['y-', 'b-', 'g-', 'k-','r-','c-','m-']
    # fig = pylab.figure(figsize = (10,8))
    for i in range(len(stocks)):
        pylab.plot(stocks[i]['Adj Close'], linewidth=1.5)

    pylab.legend(stocks_name, loc='upper right', shadow=True)
    pylab.ylabel('Adjusted Close Price')
    pylab.title('Adjusted Close Price from 2008 to 2018')
    pylab.grid('on')
    pylab.show()
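
# Hedged usage sketch (assumption): `category` is iterated character by
# character, so 'AB' with num=2 plots two randomly chosen tickers from the
# Auto group and two from the Bank group, using the same Yahoo data source
# as the function above.
if __name__ == '__main__':
    GraphicInterface('AB', 2)
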
Ejemplo n.º 53
0
def clean_data_BDC(df, lalo, d, mean_std, IQR, titlelabel):
    df.loc[:, 'flag_spike'] = 1
    df_2 = df[df['temp_logged'] > -3]  # second filter

    #     plot_BDC(df,df_2,lalo,d,titlelabel,'Plot after second filter',IQR)

    print(mean_std, IQR.mean(), IQR.max())
    if IQR.std() > 1:
        df_3 = df_2[(df_2['IQR'] < 5) & (df_2['stdday'] < 1)]  # third filter
        #         plot_BDC(df,df_3,lalo,d,titlelabel,'Plot after third filter',IQR)
        df_3['std'] = df_3['temp_logged'].rolling(5, center=True, min_periods=1).std()
        df_4 = df_3[df_3['std'] < 0.8]  # fourth filter: rolls again over 5 consecutive rows cleaned by the previous filters
    #         plot_BDC(df,df_4,lalo,d,titlelabel,'Plot after fourth filter',IQR)
    else:
        df_4 = df_2[df_2['std'] < 1]  # third filter
    #         plot_BDC(df,df_4,lalo,d,titlelabel,'Plot after third filter',IQR)

    idx_clean = df_4.index
    df.loc[~df.index.isin(idx_clean), 'flag_spike'] = 4

    df_4['std'] = df_4['temp_logged'].rolling(5, center=True, min_periods=1).std()

    #     df_5 = df_4[df_4['std'] < df_4['std'].quantile(0.98)] # fourth filter to smooth the data even more

    #     plot_BDC(df,df_5,lalo,d,titlelabel,'Plot after fifth filter',IQR)

    if IQR.std() > 1:
        df_5 = df_4[df_4['std'] < df_4['std'].quantile(0.98)]  # fourth filter to smooth the data even more
        #         plot_BDC(df,df_5,lalo,d,titlelabel,'Plot after fifth filter',IQR)
        df_5.loc[:, 'gap_time'] = df_5.index  # - df_5.index.shift(1)
        df_5['first_time'] = (df_5['gap_time'].shift(-1) - df_5['gap_time']).dt.total_seconds() / 3600 / 24
        df_5['last_time'] = (df_5['gap_time'] - df_5['gap_time'].shift(1)).dt.total_seconds() / 3600 / 24

        #         display(df_5[df_5['first_time'] > 1])

        first_date = df_5[df_5['first_time'] > 1.8].index[0]
        last_date = df_5[df_5['last_time'] > 1.8].index[-1] + timedelta(hours=10)
        df_5 = df_5[(df_5.index <= first_date) | (last_date <= df_5.index)]

    else:
        df['temp_med'] = abs(df_4['temp_logged'] - df_4['temp_logged'].rolling(5, center=True, min_periods=1).median())
        df_4['temp_med'] = abs(
            df_4['temp_logged'] - df_4['temp_logged'].rolling(5, center=True, min_periods=1).median())
        #         df_4['std_med'] = df_4['temp_med'].rolling(5, center=True, min_periods=1).std()

        fig, ax = plt.subplots(figsize=(15, 9))
        ax.plot(df_4.index, df_4['temp_logged'], color='b', alpha=0.5,
                label="std data")  # cleaned logged temperature
        #         ax.plot(FFC_final.index,FFC_final['std'].rolling(5, center=True, min_periods=1).median(),color='black', label="data")
        ax.plot(df_4.index, df_4['temp_med'], color='r', label="IQR data", zorder=10)  # deviation from the rolling median

        ax.set_ylabel('std')
        ax.set_xlabel('Time')

        plt.show()

        #         display(df_4['temp_med'].describe())

        print('Value to consider:',
              df_4['temp_med'].max() - (df_4['temp_med'].max() - df_4['temp_med'].quantile(0.98)) / 2)

        if df_4['temp_med'].max() - (df_4['temp_med'].max() - df_4['temp_med'].quantile(0.98)) / 2 > 1:
            df_4 = df_4[df_4['temp_med'] < 1]
        else:
            df_4 = df_4[df_4['temp_med'] < (
                        df_4['temp_med'].max() - (df_4['temp_med'].max() - df_4['temp_med'].quantile(0.98)) / 2)]

        df_5 = df_4.copy()

    idx_clean = df_5.index
    df.loc[(~df.index.isin(idx_clean)) & (df['flag_spike'] != 4), 'flag_spike'] = 3
    return df
def run_agent(par_list, trials=trials, T=T, L=L, ns=ns, na=na):

    #set parameters:
    #obs_unc: observation uncertainty condition
    #state_unc: state transition uncertainty condition
    #goal_pol: evaluate only policies that lead to the goal
    #utility: goal prior, preference p(o)
    obs_unc, state_unc, goal_pol, avg, utility, = par_list
    """
    create matrices
    """

    vals = np.array([1., 2 / 3., 1 / 2., 1. / 2.])

    #generating probability of observations in each state
    A = np.eye(ns)

    #generate horizontal gradient for observation uncertainty condition
    if obs_unc:

        condition = 'obs'

        for s in range(ns):
            x = s // L
            y = s % L

            c = vals[L - y - 1]

            # look for neighbors
            neighbors = []
            if (s - 4) >= 0 and (s - 4) != 11:
                neighbors.append(s - 4)

            if (s % 4) != 0 and (s - 1) != 11:
                neighbors.append(s - 1)

            if (s + 4) <= (ns - 1) and (s + 4) != 11:
                neighbors.append(s + 4)

            if ((s + 1) % 4) != 0 and (s + 1) != 11:
                neighbors.append(s + 1)

            A[s, s] = c
            for n in neighbors:
                A[n, s] = (1 - c) / len(neighbors)

    #state transition generative probability (matrix)
    B = np.zeros((ns, ns, na))

    cert_arr = np.zeros(ns)
    for s in range(ns):
        x = s // L
        y = s % L

        #state uncertainty condition
        if state_unc:
            if (x == 0) or (y == 3):
                c = vals[0]
            elif (x == 1) or (y == 2):
                c = vals[1]
            elif (x == 2) or (y == 1):
                c = vals[2]
            else:
                c = vals[3]

            condition = 'state'

        else:
            c = 1.

        cert_arr[s] = c
        for u in range(na):
            x = s // L + actions[u][0]
            y = s % L + actions[u][1]

            #check if state goes over boundary
            if x < 0:
                x = 0
            elif x == L:
                x = L - 1

            if y < 0:
                y = 0
            elif y == L:
                y = L - 1

            s_new = L * x + y
            if s_new == s:
                B[s, s, u] = 1
            else:
                B[s, s, u] = 1 - c
                B[s_new, s, u] = c
    """
    create environment (grid world)
    """

    environment = env.GridWorld(A, B, trials=trials, T=T)
    """
    create policies
    """

    if goal_pol:
        pol = []
        su = 3
        for p in itertools.product([0, 1], repeat=T - 1):
            if (np.array(p)[0:6].sum() == su) and (np.array(p)[-1] != 1):
                pol.append(list(p))

        pol = np.array(pol) + 2
    else:
        pol = np.array(list(itertools.product(list(range(na)), repeat=T - 1)))

    npi = pol.shape[0]
    """
    set state prior (where agent thinks it starts)
    """

    state_prior = np.zeros((ns))

    state_prior[0] = 1. / 4.
    state_prior[1] = 1. / 4.
    state_prior[4] = 1. / 4.
    state_prior[5] = 1. / 4.
    """
    set action selection method
    """

    if avg:

        sel = 'avg'

        ac_sel = asl.AveragedSelector(trials=trials, T=T, number_of_actions=na)
    else:

        sel = 'max'

        ac_sel = asl.MaxSelector(trials=trials, T=T, number_of_actions=na)


#    ac_sel = asl.AveragedPolicySelector(trials = trials, T = T,
#                                        number_of_policies = npi,
#                                        number_of_actions = na)
    """
    set up agent
    """
    #bethe agent
    if agent == 'bethe':

        agnt = 'bethe'

        # perception and planning

        bayes_prc = prc.BethePerception(A, B, state_prior, utility)

        bayes_pln = agt.BayesianPlanner(bayes_prc,
                                        ac_sel,
                                        trials=trials,
                                        T=T,
                                        prior_states=state_prior,
                                        policies=pol,
                                        number_of_states=ns,
                                        number_of_policies=npi)
    #MF agent
    else:

        agnt = 'mf'

        # perception and planning

        bayes_prc = prc.MFPerception(A, B, state_prior, utility, T=T)

        bayes_pln = agt.BayesianMFPlanner(bayes_prc, [],
                                          ac_sel,
                                          trials=trials,
                                          T=T,
                                          prior_states=state_prior,
                                          policies=pol,
                                          number_of_states=ns,
                                          number_of_policies=npi)
    """
    create world
    """

    w = world.World(environment, bayes_pln, trials=trials, T=T)
    """
    simulate experiment
    """

    w.simulate_experiment()
    """
    plot and evaluate results
    """
    #find successful and unsuccessful runs
    goal = np.argmax(utility)
    successfull = np.where(environment.hidden_states[:, -1] == goal)[0]
    unsuccessfull = np.where(environment.hidden_states[:, -1] != goal)[0]
    total = len(successfull)

    #set up figure
    factor = 2
    fig = plt.figure(figsize=[factor * 5, factor * 5])

    ax = fig.gca()

    #plot start and goal state
    start_goal = np.zeros((L, L))

    start_goal[0, 1] = 1.
    start_goal[-2, -1] = -1.

    u = sns.heatmap(start_goal,
                    vmin=-1,
                    vmax=1,
                    zorder=2,
                    ax=ax,
                    linewidths=2,
                    alpha=0.7,
                    cmap="RdBu_r",
                    xticklabels=False,
                    yticklabels=False,
                    cbar=False)
    ax.invert_yaxis()

    #find paths and count them
    n = np.zeros((ns, na))

    for i in successfull:

        for j in range(T - 1):
            d = environment.hidden_states[i, j +
                                          1] - environment.hidden_states[i, j]
            if d not in [1, -1, 4, -4, 0]:
                print("ERROR: beaming")
            if d == 1:
                n[environment.hidden_states[i, j], 0] += 1
            if d == -1:
                n[environment.hidden_states[i, j] - 1, 0] += 1
            if d == 4:
                n[environment.hidden_states[i, j], 1] += 1
            if d == -4:
                n[environment.hidden_states[i, j] - 4, 1] += 1

    un = np.zeros((ns, na))

    for i in unsuccessfull:

        for j in range(T - 1):
            d = environment.hidden_states[i, j +
                                          1] - environment.hidden_states[i, j]
            if d not in [1, -1, L, -L, 0]:
                print("ERROR: beaming")
            if d == 1:
                un[environment.hidden_states[i, j], 0] += 1
            if d == -1:
                un[environment.hidden_states[i, j] - 1, 0] += 1
            if d == 4:
                un[environment.hidden_states[i, j], 1] += 1
            if d == -4:
                un[environment.hidden_states[i, j] - 4, 1] += 1

    total_num = n.sum() + un.sum()

    if np.any(n > 0):
        n /= total_num

    if np.any(un > 0):
        un /= total_num

    #plotting
    for i in range(ns):

        x = [i % L + .5]
        y = [i // L + .5]

        #plot uncertainties
        if obs_unc:
            plt.plot(x,
                     y,
                     'o',
                     color=(219 / 256, 122 / 256, 147 / 256),
                     markersize=factor * 12 / (A[i, i])**2,
                     alpha=1.)
        if state_unc:
            plt.plot(x,
                     y,
                     'o',
                     color=(100 / 256, 149 / 256, 237 / 256),
                     markersize=factor * 12 / (cert_arr[i])**2,
                     alpha=1.)

        #plot unsuccessful paths
        for j in range(2):

            if un[i, j] > 0.0:
                if j == 0:
                    xp = x + [x[0] + 1]
                    yp = y + [y[0] + 0]
                if j == 1:
                    xp = x + [x[0] + 0]
                    yp = y + [y[0] + 1]

                plt.plot(xp,
                         yp,
                         '-',
                         color='r',
                         linewidth=factor * 30 * un[i, j],
                         zorder=9,
                         alpha=0.6)

    #set plot title
    plt.title("Planning: successful " + str(round(100 * total / trials)) + "%",
              fontsize=factor * 9)

    #plot successful paths on top
    for i in range(ns):

        x = [i % L + .5]
        y = [i // L + .5]

        for j in range(2):

            if n[i, j] > 0.0:
                if j == 0:
                    xp = x + [x[0] + 1]
                    yp = y + [y[0]]
                if j == 1:
                    xp = x + [x[0] + 0]
                    yp = y + [y[0] + 1]
                plt.plot(xp,
                         yp,
                         '-',
                         color='c',
                         linewidth=factor * 30 * n[i, j],
                         zorder=10,
                         alpha=0.6)

    print("percent won", total / trials, "state prior", np.amax(utility))

    plt.show()
    """
    save data
    """

    if save_data:
        jsonpickle_numpy.register_handlers()

        ut = np.amax(utility)
        p_o = '{:02d}'.format(round(ut * 10).astype(int))
        fname = agnt + '_' + condition + '_' + sel + '_initUnc_' + p_o + '.json'
        fname = os.path.join(data_folder, fname)
        pickled = pickle.encode(w)
        with open(fname, 'w') as outfile:
            json.dump(pickled, outfile)
Ejemplo n.º 55
0
                      help="types allowed: css, eigencss, [default: %default]")
    parser.add_option("-n", "--name", dest="scss_name",
                      help="Name for saving scss image", metavar="NAME")

    (options, args) = parser.parse_args(sys.argv)

    if len(args) != 1:
        print(len(args), args)
        print(options)
        parser.error('Incorrect number of arguments, path and method needed')
    else:
        if options.cpath and options.csstype:
            if options.csstype == 'css':
                css = exp_simple_css(options.cpath, 400, 0.1, 5)   
                plt.plot(css)   
                plt.show(block=True)
            else:
                css = exp_eigen_css(options.cpath, 400, 0.1, False)   
                plt.plot(css)   
                plt.show(block=True)

        if options.dpath and options.csstype and options.scss_name:
            print('scss will be saved as: {0}.npy'.format(options.scss_name))
            scss, maxs = exp_gen_scss(options.dpath, 300, .5, 10)
            np.save(options.scss_name, scss)
            np.save((options.scss_name + '_maxs'), maxs)


    # cpath = '../../../../../mThesis/code/branches/expdata/bunny_side/slice145.txt'
    # css1 = exp_simple_css(cpath, 400, 0.1, 5)
    # css2 = exp_simple_css(cpath, 400, 0.01, 3)
Ejemplo n.º 56
0
    def plot(self, fig_number=322):
        """plot the stored data in figure `fig_number`.

        Dependencies: `matplotlib.pylab`
        """
        from matplotlib import pylab
        from matplotlib.pylab import (gca, figure, plot, xlabel, grid,
                                      semilogy, text, draw, show, subplot,
                                      tight_layout, rcParamsDefault, xlim,
                                      ylim)

        def title_(*args, **kwargs):
            kwargs.setdefault('size', rcParamsDefault['axes.labelsize'])
            pylab.title(*args, **kwargs)

        def subtitle(*args, **kwargs):
            kwargs.setdefault('horizontalalignment', 'center')
            text(0.5 * (xlim()[1] - xlim()[0]), 0.9 * ylim()[1], *args,
                 **kwargs)

        def legend_(*args, **kwargs):
            kwargs.setdefault('framealpha', 0.3)
            kwargs.setdefault('fancybox', True)
            kwargs.setdefault('fontsize', rcParamsDefault['font.size'] - 2)
            pylab.legend(*args, **kwargs)

        figure(fig_number)

        dat = self._data  # dictionary with entries as given in __init__
        if not dat:
            return
        try:  # a hack to get the presumable population size lambda
            strpopsize = ' (evaluations / %s)' % str(dat['eval'][-2] -
                                                     dat['eval'][-3])
        except IndexError:
            strpopsize = ''

        # plot fit, Delta fit, sigma
        subplot(221)
        gca().clear()
        if dat['fit'][0] is None:  # plot is fine with None, but comput-
            dat['fit'][0] = dat['fit'][1]  # tations need numbers
            # should be reverted later, but let's be lazy
        assert dat['fit'].count(None) == 0
        fmin = min(dat['fit'])
        imin = dat['fit'].index(fmin)
        dat['fit'][imin] = max(dat['fit']) + 1
        fmin2 = min(dat['fit'])
        dat['fit'][imin] = fmin
        semilogy(dat['iter'],
                 [f - fmin if f - fmin > 1e-19 else None for f in dat['fit']],
                 'c',
                 linewidth=1,
                 label='f-min(f)')
        semilogy(dat['iter'], [
            max((fmin2 - fmin, 1e-19)) if f - fmin <= 1e-19 else None
            for f in dat['fit']
        ], 'C1*')

        semilogy(dat['iter'], [abs(f) for f in dat['fit']],
                 'b',
                 label='abs(f-value)')
        semilogy(dat['iter'], dat['sigma'], 'g', label='sigma')
        semilogy(dat['iter'][imin], abs(fmin), 'r*', label='abs(min(f))')
        if dat['more_data']:
            gca().twinx()
            plot(dat['iter'], dat['more_data'])
        grid(True)
        legend_(*[
            [v[i] for i in [1, 0, 2, 3]]  # just a reordering
            for v in gca().get_legend_handles_labels()
        ])

        # plot xmean
        subplot(222)
        gca().clear()
        plot(dat['iter'], dat['xmean'])
        for i in range(len(dat['xmean'][-1])):
            text(dat['iter'][0], dat['xmean'][0][i], str(i))
            text(dat['iter'][-1], dat['xmean'][-1][i], str(i))
        subtitle('mean solution')
        grid(True)

        # plot squareroot of eigenvalues
        subplot(223)
        gca().clear()
        semilogy(dat['iter'], dat['D'], 'm')
        xlabel('iterations' + strpopsize)
        title_('Axis lengths')
        grid(True)

        # plot stds
        subplot(224)
        # if len(gcf().axes) > 1:
        #     sca(pylab.gcf().axes[1])
        # else:
        #     twinx()
        gca().clear()
        semilogy(dat['iter'], dat['stds'])
        for i in range(len(dat['stds'][-1])):
            text(dat['iter'][-1], dat['stds'][-1][i], str(i))
        title_('Coordinate-wise STDs w/o sigma')
        grid(True)
        xlabel('iterations' + strpopsize)
        _stdout.flush()
        tight_layout()
        draw()
        show()
        CMAESDataLogger.plotted += 1
def plot_example_psds(example, rate):
    """
    This function creates a figure with 4 lines to show the overall psd for 
    the four sleep examples. (Recall row 0 is REM, rows 1-3 are NREM stages 1,
    2 and 3/4)
    """

    pxx_max_1 = []
    pxx_max_2 = []
    pxx_max_3 = []

    example_1_max = []
    example_2_max = []
    example_3_max = []

    example_1_min = []
    example_2_min = []
    example_3_min = []

    pxx0, freq0 = m.psd(example[0], 256, rate)
    pxx1, freq1 = m.psd(example[1], 256, rate)
    pxx2, freq2 = m.psd(example[2], 256, rate)
    pxx3, freq3 = m.psd(example[3], 256, rate)
    pxx0_normalized = pxx0 / sum(pxx0)
    pxx1_normalized = pxx1 / sum(pxx1)
    pxx2_normalized = pxx2 / sum(pxx2)
    pxx3_normalized = pxx3 / sum(pxx3)

    plt.figure()
    plt.plot(freq0, pxx0_normalized, color='k', label='REM sleep')
    plt.plot(freq1, pxx1_normalized, color='r', label='st1 NREM sleep')
    plt.plot(freq2, pxx2_normalized, color='b', label='st2 NREM sleep')
    plt.plot(freq3, pxx3_normalized, color='c', label='st3 NREM sleep')
    plt.xlim(0, 30)
    plt.xlabel('Frequency (Hz)')
    plt.ylabel('Power Spectral Density (dB/Hz)')
    plt.title('PSDs for the 4 sleep stages')
    plt.legend(loc=0)
    plt.show()

    for i in range(10):
        start = i * 3840
        end = (i + 1) * 3840
        pxx, freq = m.psd(example[1][start:end], rate)
        pxx_normalized = pxx / sum(pxx)
        pxx_max_1 = np.append(pxx_max_1, max(pxx_normalized))
        example_1_max = np.append(example_1_max, max(example[1][start:end]))
        example_1_min = np.append(example_1_min, min(example[1][start:end]))
    print([
        max(example_1_min),
        min(example_1_min),
        max(example_1_max),
        min(example_1_max),
        max(pxx_max_1),
        min(pxx_max_1)
    ])

    for i in range(10):
        start = i * 3840
        end = (i + 1) * 3840
        pxx, freq = m.psd(example[2][start:end], rate)
        pxx_normalized = pxx / sum(pxx)
        pxx_max_2 = np.append(pxx_max_2, max(pxx_normalized))
        example_2_max = np.append(example_2_max, max(example[2][start:end]))
        example_2_min = np.append(example_2_min, min(example[2][start:end]))
    print([
        max(example_2_min),
        min(example_2_min),
        max(example_2_max),
        min(example_2_max),
        max(pxx_max_2),
        min(pxx_max_2)
    ])

    for i in range(10):
        start = i * 3840
        end = (i + 1) * 3840
        pxx, freq = m.psd(example[3][start:end], rate)
        pxx_normalized = pxx / sum(pxx)
        pxx_max_3 = np.append(pxx_max_3, max(pxx_normalized))
        example_3_max = np.append(example_3_max, max(example[3][start:end]))
        example_3_min = np.append(example_3_min, min(example[3][start:end]))
    print([
        max(example_3_min),
        min(example_3_min),
        max(example_3_max),
        min(example_3_max),
        max(pxx_max_3),
        min(pxx_max_3)
    ])
Ejemplo n.º 58
0
def plot_BDC(FFC, FFC_clean, lalo, d, title1, title2, IQR):
    def c2f(temp):
        """
        Returns the temperature converted to Fahrenheit.
        """
        return 1.8 * temp + 32

    def convert_ax_f_to_fahrenheit(ax_c):
        """
        Update second axis according with first axis.
        """
        y1, y2 = ax_c.get_ylim()
        ax_f.set_ylim(c2f(y1), c2f(y2))
        ax_f.figure.canvas.draw()

    # plots the original, the cleaned FFC (assumes temperature degC) and returns degF
    # "d" is the previously calculated movement of trap
    # puts the title (assumes fisherman's name)

    raw_data = FFC[FFC[
        'temp_logged'].notnull()]  # pd.read_csv(asc_file,skiprows=skipr,parse_dates={'datet':[0,1]},index_col='datet',date_parser=parse,names=['Date','Time','temp_logged'],encoding= 'unicode_escape')

    fig, ax_c = plt.subplots(figsize=(15, 9))
    ax_f = ax_c.twinx()

    # automatically update ylim of ax_f when ylim of ax_c changes
    ax_c.callbacks.connect("ylim_changed", convert_ax_f_to_fahrenheit)
    ax_c.plot(raw_data.index, raw_data['temp_logged'], '-', color='r', label="raw data", zorder=0)

    ax_c.set_ylabel('celsius')
    ax_c.set_xlabel('Time')

    ax_c.set_xlim(min(raw_data.index) - timedelta(hours=5),
                  max(raw_data.index) + timedelta(hours=5))  # limit the plot to logged data
    ax_c.set_ylim(min(raw_data['temp_logged']) - 2, max(raw_data['temp_logged']) + 2)

    #     display(FFC_clean[FFC_clean['flag_spike'] == 3])
    try:
        ax_c.plot(FFC_clean[FFC_clean['flag_spike'] == 1].index, FFC_clean[FFC_clean['flag_spike'] == 1]['temp_logged'],
                  color='b', label="clean data")  # plots degF on right hand side
        ax_c.scatter(FFC_clean[FFC_clean['flag_logged_location'] != 1].index,
                     FFC_clean[FFC_clean['flag_logged_location'] != 1]['temp_logged'], s=10, c='darkred', alpha=0.5,
                     cmap='Wistia', label='flag location not recorded', zorder=100)  # ,zorder=10) # plots flag depth
        ax_c.plot(lalo.index, lalo['temp'], 'c*', label='logged data',
                  zorder=101)  # plots logged data from LatLong file
        ax_c.scatter(FFC_clean[FFC_clean['flag_spike'] != 1].index,
                     FFC_clean[FFC_clean['flag_spike'] != 1]['temp_logged'], c='red', s=10, cmap='Wistia',
                     label="filtered data", zorder=502)
    #             ax_c.scatter(FFC_clean[FFC_clean['flag_bathymetry'] != 1].index,FFC_clean[FFC_clean['flag_bathymetry'] != 1]['temp_logged'] ,c='cyan', alpha=0.1, s=10, cmap='Wistia',label="flag bathymetry",zorder=102)

    except:
        print('No good plot')
        ax_c.plot(FFC_clean.index, FFC_clean['temp_logged'], color='b',
                  label="clean data")  # plots degF on right hand side
        ax_c.plot(lalo.index, lalo['temp'], 'c*', label='logged data',
                  zorder=101)  # plots logged data from LatLong file
        ax_c.scatter(FFC.index, FFC['temp_logged'], c='red', s=10, cmap='Wistia', label="filtered data", zorder=102)

    FT = [FFC_clean['temp_logged']]
    LT = [np.array(lalo['temp'].values)]  # logged temp in degC
    moveid = list(where(array(d) > 1.0)[0])  # movements logged greater than 1km
    dist = 3
    tip = 0.5

    c_move = 0
    for kk in moveid:
        ax_c.annotate('%s' % float('%6.1f' % d[kk]) + ' kms',
                      xy=(lalo.index[kk], LT[0][kk] + tip), xycoords='data',
                      xytext=(lalo.index[kk], LT[0][kk] + dist),
                      arrowprops=dict(facecolor='black', arrowstyle='->',
                                      connectionstyle="angle,angleA=0,angleB=90,rad=10"),
                      horizontalalignment='left', verticalalignment='bottom')
        dist *= -1.2
        tip *= -1
        if c_move > 2:
            if dist > 0:
                dist = 3
            else:
                dist = -3
            c_move = 0
        c_move += 1

    #     per_good = len(FFC_clean[(FFC_clean['flag_spike'] == 1) & (FFC_clean['flag_change_location'] == 1) & (FFC_clean['flag_change_depth'] == 1)])/len(FFC_clean) * 100
    per_good = len(FFC_clean[(FFC_clean['flag_spike'] == 1)]) / len(FFC_clean) * 100
    per_model = len(FFC_clean[FFC_clean['flag_bathy'] == 1]) / len(FFC_clean) * 100

    savefig = True if per_good >= 99 and per_model > 50 and FFC_clean['gebco'].mean() > 0 else False

    plt.suptitle(title1)
    plt.title(title2 + ' and %s' % float('%6.1f' % per_good) + '% of clean data respect to raw data')

    ax_f.set_ylabel('fahrenheit')

    fig.autofmt_xdate()
    ax_c.legend()

    if savefig:
        plt.savefig(path_save + 'Passed/' + year + '_FS' + LFAzone + '_' + str(
            int(FFC_final['gauge'].iloc[0])) + '_' + fisherman.replace(' ', '_') + '.png')
    else:
        plt.savefig(
            path_save + year + '_FS' + LFAzone + '_' + str(int(FFC_clean['gauge'].iloc[0])) + '_' + fisherman.replace(
                ' ', '_') + '.png')

    plt.show()

    # Bathymetry plot

    fig, ax = plt.subplots(figsize=(15, 9))
    ax.plot(FFC_final.index, -FFC_final['depth'], color='b', label="logged data")  # logged depth
    ax.plot(FFC_final.index, -FFC_final['gebco'], color='r', label="gebco data")  # GEBCO bathymetry

    #         ax.plot(FFC_final.index,FFC_final['ngdc'],color='green',label="ngdc data")  # NGDC bathymetry
    ax.set_ylabel('depth (m)')
    ax.set_xlabel('Time')

    #         ax.set_xlim(min(imgtoup.index) - timedelta(hours=5),max(imgtoup.index) + timedelta(hours=5)) # limit the plot to logged data
    min_gebco = -FFC_final['gebco'].max()
    min_depth = -FFC_final['depth'].max()

    ax.set_ylim(min_gebco - 10, 20) if min_gebco < min_depth else ax.set_ylim(
        min_depth - 10, 20)

    plt.legend()

    if savefig:
        plt.savefig(path_save + 'Passed/' + year + '_FS' + LFAzone + '_' + str(
            int(FFC_final['gauge'].iloc[0])) + '_' + fisherman.replace(' ', '_') + '_bathymetry.png')
    else:
        plt.savefig(
            path_save + year + '_FS' + LFAzone + '_' + str(int(FFC_clean['gauge'].iloc[0])) + '_' + fisherman.replace(
                ' ', '_') + '_bathymetry.png')

    plt.show()

    return savefig
Ejemplo n.º 59
0
            pylab.colorbar()
            pylab.title('so')
            pylab.subplot(2, 2, 2)
            pylab.pcolor(soInterpEsmfInterface - soInterpESMP,
                         vmin=-0.5,
                         vmax=0.5)
            pylab.colorbar()
            pylab.title('[%d] EsmfInterface - ESMP' % mype)
            pylab.subplot(2, 2, 3)
            pylab.pcolor(soInterpEsmfInterface, vmin=20.0, vmax=40.0)
            pylab.colorbar()
            pylab.title('[%d] ESMFInterface' % mype)
            pylab.subplot(2, 2, 4)
            pylab.pcolor(soInterpESMP, vmin=20, vmax=40)
            pylab.colorbar()
            pylab.title('[%d] ESMP' % mype)

        # clean up
        ESMP.ESMP_FieldRegridRelease(regrid1)
        ESMP.ESMP_FieldDestroy(dstFld)
        ESMP.ESMP_GridDestroy(dstGrid)
        ESMP.ESMP_FieldDestroy(srcFld)
        ESMP.ESMP_GridDestroy(srcGrid)


if __name__ == '__main__':
    print ""
    suite = unittest.TestLoader().loadTestsFromTestCase(Test)
    unittest.TextTestRunner(verbosity=1).run(suite)
    if PLOT: pylab.show()
Ejemplo n.º 60
0
            a2 = u[i+2,1] + 2.*u[i-1,1] - 2.*u[i+1,1] - u[i-2,1]
        else:  a2 = u[i-1, 1] - u[i+1, 1]  
        a3        = u[i+1, 1] - u[i-1, 1] 
        u[i, 2] = u[i,0] - a1*a3 - 2.*fac*a2/3.
    if j%100 ==  0:                           # Plot every 100 time steps
        for i in range (1, mx - 2): spl[i, m] = u[i, 2]
        print(m)  
        m = m + 1     
    for k in range(0, mx):                  # Recycle array saves memory
        u[k, 0] = u[k, 1]                
        u[k, 1] = u[k, 2] 

x = list(range(0, mx, 2))                        # Plot every other point
y = list(range(0, 21))                  # Plot 21 lines every 100 t steps
X, Y = p.meshgrid(x, y)

def functz(spl):                           
    z = spl[X, Y]       
    return z

fig  = p.figure()                                         # create figure
ax = Axes3D(fig)                                              # plot axes
ax.plot_wireframe(X, Y, spl[X, Y], color = 'r')           # red wireframe
ax.set_xlabel('Position')                                    # label axes
ax.set_ylabel('Time')
ax.set_zlabel('Disturbance')
p.show()                                # Show figure, close Python shell
print("That's all folks!") 

# Three-dimensional solution