Code Example #1
File: reduce_facet.py  Project: yupinghuang/pulse_evn
def plot_gauss(no):
    seq = 1
    if no < 1000:
        imtype = 'IIM' + str(no).zfill(3)
    else:
        imtype = 'II' + str(no)

    print('doing ' + imtype)
    testImg = WAIPSImage('J0528+2200', imtype, 1, seq,43)
    dat = testImg.pixels.flatten()
    end_1, end_2 = np.array([dat.size*0.1,dat.size*0.9],dtype=int)
    mu=np.mean(dat[end_1:end_2])
    sigma=stats.tstd(dat[end_1:end_2])
    peak = np.max(dat)
    print('peak:', peak, 'rms:', sigma, 'snr:', peak / sigma)
    plt.figure()
    n,bins,patches = plt.hist(dat,100,normed=1,histtype='stepfilled')
    plt.setp(patches,'facecolor','g','alpha',0.75)
    y = plt.normpdf(bins,mu,sigma)
    plt.plot(bins,y,'k--',linewidth=1.5)
    plt.show()
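Note: normed=1 and the normpdf helper used above come from older matplotlib releases (the pylab/mlab API) and are no longer available in current versions. A minimal modern equivalent of the same histogram-plus-Gaussian-overlay pattern, assuming matplotlib 3.x and scipy are installed (hist_with_gaussian is an illustrative name, not part of the original project):

import matplotlib.pyplot as plt
from scipy.stats import norm

def hist_with_gaussian(dat, mu, sigma, nbins=100):
    # Density-normalised histogram (replaces normed=1).
    counts, edges, patches = plt.hist(dat, nbins, density=True, histtype='stepfilled')
    plt.setp(patches, facecolor='g', alpha=0.75)
    # Overlay the fitted Gaussian density (replaces normpdf).
    plt.plot(edges, norm.pdf(edges, mu, sigma), 'k--', linewidth=1.5)
    plt.show()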
Code Example #2
File: barebones.py  Project: stephentu/xpr
def test2():
    mu = 10.0
    sigma = 2.0

    x = Variable("x", float)
    loggauss = -0.5 * math.log( 2.0 * math.pi * sigma * sigma ) - 0.5 * ((x - mu) ** 2) / (sigma * sigma)

    f = Function(
            name="foo",
            params=(x,),
            rettype=float,
            expr=loggauss)
    engine = create_execution_engine()
    module = f.compile(engine)
    func_ptr = engine.get_pointer_to_function(module.get_function("foo"))

    samples = metropolis_hastings(func_ptr, sigma, 0.0, 1000, 2)
    #plt.plot(np.arange(len(samples)), samples)

    n, bins, patches = plt.hist(samples[500:], 25, normed=1, histtype='stepfilled')
    plt.setp(patches, 'facecolor', 'g', 'alpha', 0.75)

    # add a line showing the expected distribution
    y = plt.normpdf(bins, mu, sigma)
    l = plt.plot(bins, y, 'k--', linewidth=1.5)
    plt.show()
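The metropolis_hastings function is not defined in this snippet; judging from the call, its arguments are the (compiled) log-density, the proposal width, a starting point, a sample count, and a thinning factor. A minimal pure-Python sketch of that sampler, taking an ordinary Python callable instead of the compiled function pointer (metropolis_hastings_py is an illustrative name):

import math
import random

def metropolis_hastings_py(logpdf, prop_sigma, x0, n_samples, thin=1):
    # Random-walk Metropolis with a Gaussian proposal of width prop_sigma.
    samples = []
    x, lp = x0, logpdf(x0)
    while len(samples) < n_samples:
        for _ in range(thin):
            x_new = x + random.gauss(0.0, prop_sigma)
            lp_new = logpdf(x_new)
            # Accept with probability min(1, p(x_new) / p(x)).
            if random.random() < math.exp(min(0.0, lp_new - lp)):
                x, lp = x_new, lp_new
        samples.append(x)
    return samples

With the loggauss density above, the retained samples (after the samples[500:] burn-in slice) should concentrate around mu = 10.0 with spread sigma = 2.0.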
Code Example #3
def test2():
    mu = 10.0
    sigma = 2.0

    x = Variable("x", float)
    loggauss = -0.5 * math.log(2.0 * math.pi * sigma * sigma) - 0.5 * (
        (x - mu)**2) / (sigma * sigma)

    f = Function(name="foo", params=(x, ), rettype=float, expr=loggauss)
    engine = create_execution_engine()
    module = f.compile(engine)
    func_ptr = engine.get_pointer_to_function(module.get_function("foo"))

    samples = metropolis_hastings(func_ptr, sigma, 0.0, 1000, 2)
    #plt.plot(np.arange(len(samples)), samples)

    n, bins, patches = plt.hist(samples[500:],
                                25,
                                normed=1,
                                histtype='stepfilled')
    plt.setp(patches, 'facecolor', 'g', 'alpha', 0.75)

    # add a line showing the expected distribution
    y = plt.normpdf(bins, mu, sigma)
    l = plt.plot(bins, y, 'k--', linewidth=1.5)
    plt.show()
Code Example #4
def returns_histogram(returns):
    (mu, sigma) = norm.fit(returns)
    (n, bins, patches) = plt.hist(returns, 400, normed=1, color='k')
    y = plt.normpdf(bins, mu, sigma)
    plt.plot(bins, y, 'r--', linewidth=1.5)
    plt.xlim(-50, 50)
    plt.title('Histogram of normalized returns')
    plt.savefig(pdf_pages, format='pdf')
    plt.close()
Code Example #5
    def __init__(self, img):
        """
        get the grid from an opencv2 image object
        """
        self.img = img

        sbd_params = cv2.SimpleBlobDetector_Params()
        for (k, v) in SBD_HOLES.items():
            setattr(sbd_params, k, v)
        sbd = cv2.SimpleBlobDetector(sbd_params)
        keypoints = sbd.detect(img)

        blobs = img.copy()
        for kp in keypoints:
            cv2.circle(blobs, tuple([int(coord) for coord in kp.pt]), 4, 255, 4)

        ###### Identifying the grid spacing #####
        # Find the distances between all the detected holes in the image; the
        # actual spacing between pins should be the minimum distance observed
        # between holes.

        dists = np.zeros((len(keypoints),len(keypoints)))
        kps = [np.array(p.pt) for p in keypoints]
        for j, pj in enumerate(kps):
            for k, pk in enumerate(kps):
                dists[j,k] = np.linalg.norm(pj - pk)
        upper = np.triu(dists)
        flat = np.array([x for x in upper.reshape((-1,1)) if x > 1])
        n_dist, bins = np.histogram(flat, range=(0,100), bins=400)
        min_spacing = bins[lab.find(np.diff(gaussian_filter(n_dist, 2)) < 0)[0]+1]
        cond = np.where((upper >= min_spacing - 1) * (upper <= min_spacing + 1), 1, 0)
        neighbors = []
        for j, row in enumerate(cond):
            for k, el in enumerate(row):
                if el:
                    # print j, k, keypoints[j].pt, keypoints[k].pt
                    neighbors.append([j, k, keypoints[j].pt, keypoints[k].pt])

        # Draw lines between grid neighbors
        lines = blobs.copy()
        for n in neighbors:
            cv2.line(lines, tuple(map(int, n[2])), tuple(map(int, n[3])), 255)

        # Identify the angle of the grid
        # We find the angle of the grid by looking at the orientation of the
        # lines between adjacent holes in the grid.

        angles = np.array([np.arctan2(n[3][1] - n[2][1], n[3][0] - n[2][0]) for n in neighbors])
        angle_step = 0.5
        n_ang, bins = np.histogram((angles % np.pi) * 180 / np.pi,
                                   range=(0, 180), bins=int(180. / angle_step))


        kernel = np.tile(
            np.hstack([
                lab.normpdf(np.arange(int(45. / angle_step)), 0, 10. / angle_step),
                lab.normpdf(np.arange(int(45. / angle_step)), 45. / angle_step, 10. / angle_step),
            ]), 2)

        angle_fits = np.zeros(int(45./angle_step))
        for t in range(len(angle_fits)):
            angle_fits[t] = np.correlate(np.roll(kernel, t), n_ang)
        grid_angle = (np.argmax(angle_fits) * angle_step + angle_step/2) * np.pi/180
        print(grid_angle, grid_angle * 180 / np.pi)

        # Translation of the grid
        # Now we have the spacing and orientation of the grid, so let's find
        # its translation as well. First, let's undo the rotation:
        R = rotmat(-grid_angle)
        kps_rot = [np.array(R * np.reshape(k, (2,-1))) for k in kps]
        # Next, we can find the phase of the x and y position of the grid using mod
        x_mod = [k[0][0] % min_spacing for k in kps_rot]
        y_mod = [k[1][0] % min_spacing for k in kps_rot]
        n_x, bins = np.histogram(x_mod, range=(0, min_spacing), bins=int(min_spacing))
        n_y, bins = np.histogram(y_mod, range=(0, min_spacing), bins=int(min_spacing))

        grid_orig = np.reshape([np.argmax(gaussian_filter(n_x, 2))+0.5, np.argmax(gaussian_filter(n_y,2))+0.5], (2,-1))

        self.grid_orig = grid_orig
        self.grid_angle = grid_angle
        self.grid_spacing = min_spacing
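The rotmat helper used above is not defined in this excerpt; it presumably builds a 2x2 rotation matrix. A minimal sketch, returning np.matrix so that the expression R * np.reshape(k, (2, -1)) above acts as a matrix-vector product:

import numpy as np

def rotmat(theta):
    # 2x2 rotation matrix for an angle theta in radians.
    c, s = np.cos(theta), np.sin(theta)
    return np.matrix([[c, -s], [s, c]])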
Code Example #6
                                    new_kls_data.replace(' ',''),
                                    model.replace(' ','')))
                plt.close()

                # Make a QQ-plot of residuals to check for normality.
                sm.qqplot(resids, line='s')
                plt.savefig(plots_directory + 'resid_qq_{}_{}_{}.png'
                            .format(wordtype,
                                    new_kls_data.replace(' ',''),
                                    model.replace(' ','')))
                plt.close()

                # Make a histogram of residuals with fitted normal curve
                # to check for normality of residuals.
                (_, bins, _) = plt.hist(resids, 200, normed=1, color='k')
                normal_curve = plt.normpdf(bins, np.mean(resids),
                                           np.std(resids))
                plt.plot(bins, normal_curve, 'r--', linewidth=1.5)
                plt.savefig(plots_directory + 'resid_hist_{}_{}_{}.png'
                            .format(wordtype,
                                    new_kls_data.replace(' ',''),
                                    model.replace(' ','')))
                plt.close()



            # Use a simple t-test to test if the slopes are the same.
            for (i, wordtype_i) in enumerate(wordtypes):
                for (j, wordtype_j) in [ (j,t) for (j,t)
                                         in enumerate(wordtypes) if j>i ]:
                    diff = abs(slopes[wordtype_i] - slopes[wordtype_j])
                    stderr = np.sqrt(stderrs[wordtype_i]**2
Code Example #7
def main():
    splits = []
    with open('data/results.json') as fin:
        for line in fin:
            result = json.loads(line)
            splits += result.get('split', [])

    splits = [Struct(**split) for split in splits]

    NUM_BUCKETS = 22

    by_bucket = collections.defaultdict(list)
    for s in splits:
        bucket = int(s.area_fraction * NUM_BUCKETS + 0.5) / NUM_BUCKETS
        by_bucket[bucket].append(s.pop_fraction)

    by_bucket = dict(by_bucket)

    total_variance = 0
    for bucket, xs in sorted(by_bucket.items()):
        if len(xs) < 2:
            continue
        print('{}:   {:.4f} +- {:.4f} ({})'.format(
            bucket,
            statistics.mean(xs), statistics.stdev(xs),
            len(xs)))

        total_variance += statistics.variance(xs) * len(xs) / len(splits)

    print('total stddev', sqrt(total_variance))

    #print(by_bucket[0.3])

    xs = by_bucket[0.0]

    n, bins, patches = pylab.hist(xs, 20, normed=1, histtype='stepfilled')
    pylab.setp(patches, 'facecolor', 'g', 'alpha', 0.75)

    #mu = statistics.mean(xs)
    mu = statistics.mode(int(x * 20 + 0.5) / 20 for x in xs)
    sigma = statistics.pstdev(xs, mu=mu)
    # add a line showing the expected distribution
    y = pylab.normpdf(bins, mu, sigma)
    l = pylab.plot(bins, y, 'k--', linewidth=1.5)

    #by_bucket[x]

    #pylab.scatter(
    #    xs, ys, c=colors,
    #    s=0.1,
    #    linewidths=(0,),
    #    cmap='cool')


    pylab.savefig('pop_fraction.png', dpi=140)


    result = []
    for bucket, xs in sorted(by_bucket.items()):
        N = 36
        xs = sorted(xs)
        #result.append(str([xs[(len(xs) - 1) * i // N] for i in range(N + 1)]))
        result.append(str([
            statistics.mean(xs[len(xs) * i // N : len(xs) * (i + 1) // N])
            for i in range(N)
            ]))

    result = ', '.join(r.replace('[', '{').replace(']', '}') for r in result)
    with open('prediction.h', 'w') as fout:
        fout.write('const vector<vector<double>> prediction = {{ {} }};'.format(result))
Code Example #8
        prices.append(new_price)



    # Plot some stuff.
    plt.plot(range(total_time), prices[100: ])
    plt.xlabel('Time')
    plt.ylabel('log(price)')
    plt.savefig(pdf_pages, format='pdf')
    plt.close()

    returns = [ prices[i] - prices[i-1] for i in range(1,len(prices)) ]
    plt.plot(range(total_time-1), returns[100: ])
    plt.xlabel('Time')
    plt.ylabel('log(returns)')
    plt.savefig(pdf_pages, format='pdf')
    plt.close()

    (mu, sigma) = norm.fit(returns)
    (n, bins, patches) = plt.hist(returns, 50, normed=1)
    y = plt.normpdf(bins, mu, sigma)
    plt.plot(bins, y, 'r--', linewidth=1.5)
    plt.title('Histogram of normalized returns')
    plt.savefig(pdf_pages, format='pdf')
    plt.close()



pdf_pages.close()
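The pdf_pages object is not created in this excerpt; given the plt.savefig(pdf_pages, format='pdf') calls and the final pdf_pages.close(), it is presumably a matplotlib PdfPages object opened earlier in the script, along these lines (the output filename here is illustrative):

from matplotlib.backends.backend_pdf import PdfPages

# Collects every figure saved above into a single multi-page PDF.
pdf_pages = PdfPages('simulation_plots.pdf')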

Code Example #9
max_g = max(ganancias)
min_g = min(ganancias)

bins = (max_g - min_g) / 0.1
fig = plt.figure()
a, bins, patches = plt.hist(ganancias,15,range=(365, 366.5), normed=1,facecolor='green',alpha=0.4, label='Medias semanales')
plt.title('Normalidad asintotica de la media muestral')

# Plot the density function of the fitted normal distribution
mu = np.mean(ganancias)
sigma = np.std(ganancias)

print(mu)
print(sigma ** 2)
bins = [365 + i / 100.0 for i in range(151)]
y = mlab.normpdf(bins, mu, sigma)
plt.plot(bins, y, 'r', color='red', label='Normal')

ax = fig.add_subplot(111)
fig.subplots_adjust(top=0.85)


ax.set_xlabel('Medias Semanales')

def normal(data):
    x = sum(data)/len(data)
    sigma2 = 0
    for xi in data:
        sigma2 += (xi - x)**2 / len(data)
    return (x,sigma2)
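A quick sanity check of normal(): it returns the sample mean and the population (ddof=0) variance, so it should agree with numpy's np.mean and np.var. The data values below are made up for illustration:

import numpy as np

data = [365.2, 365.4, 365.1, 365.9, 365.5]
x_bar, sigma2 = normal(data)
assert abs(x_bar - np.mean(data)) < 1e-9
assert abs(sigma2 - np.var(data)) < 1e-9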