Example #1
File: test.py  Project: AdkPete/ASTP720
def T0():
    data = 2
    T = MCMC.MCMC(P0, data)

    X, MP, ML = T.M_H(Q0, 10000, np.array([10, 3]))
    mkp0(X)
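P0 and Q0 are defined elsewhere in test.py: a target (log-)density and a proposal for the Metropolis-Hastings sampler. Purely as a hypothetical sketch of the shapes such callables usually take with this M_H signature (these are not the repository's actual definitions):

import numpy as np

def P0(params, data):
    # Hypothetical log-target: Gaussian log-likelihood in two parameters (mu, sigma).
    mu, sigma = params
    if sigma <= 0:
        return -np.inf
    x = np.atleast_1d(data)
    return -0.5 * np.sum((x - mu) ** 2) / sigma ** 2 - len(x) * np.log(sigma)

def Q0(params):
    # Hypothetical symmetric random-walk proposal around the current parameters.
    return params + np.random.normal(0.0, 0.5, size=len(params))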
Example #2
File: Run.py  Project: DmitriiGudin/MCMC
def PART_1():
    # Generate clean data.
    data_x_1 = np.random.uniform(x_min_1, x_max_1, N_points_1)
    data_y_1 = A_1*data_x_1 + B_1
    # Add noise.
    data_x_1 += lib_distrib.gen_distrib (lib_distrib.Gauss, (0, dx_1), -dx_factor_1*dx_1, dx_factor_1*dx_1, 0, lib_distrib.Gauss(0, (0, dx_1)), N_points_1)
    data_y_1 += lib_distrib.gen_distrib (lib_distrib.Gauss, (0, dy_1), -dy_factor_1*dy_1, dy_factor_1*dy_1, 0, lib_distrib.Gauss(0, (0, dy_1)), N_points_1)
    # Plot the generated data.
    plot_initial_data_1 (data_x_1, data_y_1)
    # Perform the MCMC procedure. 
    data_1 = MCMC.MCMC (linear, (0,0), data_x_1, data_y_1, dp_1, dp_decay_1, N_coarse_1, N_fine_1, N_final_1, N_logging)
    # Retrieve the relevant data (the last N_final_1 batches):
    data_final_1 = data_1[N_coarse_1+N_fine_1+1:N_coarse_1+N_fine_1+N_final_1+1]
    # Calculate and output the optimal A, B values:
    A_1_opt, B_1_opt = np.median([d[1][0] for d in data_final_1]), np.median([d[1][1] for d in data_final_1])
    print "Optimal linear parameter values: A = ", A_1_opt, ", B = ", B_1_opt
    # Calculate the acceptance rates for all 3 modes. Print them out.
    acc_rates = calc_acceptance_rates ([d[2] for d in data_1[1:N_coarse_1+1]], [d[2] for d in data_1[N_coarse_1+1:N_coarse_1+N_fine_1+1]], [d[2] for d in data_final_1])
    print "Acceptance rate for the Coarse mode: ", round(acc_rates[0]*100, 2), "%."
    print "Acceptance rate for the Fine mode: ", round(acc_rates[1]*100, 2), "%."
    print "Acceptance rate for the Final mode: ", round(acc_rates[2]*100, 2), "%."
    # Plot the posterior distribution for A, B and both. Only for the Final mode.
    plot_distr ([[d[1][0] for d in data_final_1], [d[1][1] for d in data_final_1]], ['A','B'], 1)
    # Plot the cumulative sum graph and calculate 68% and 95% confidence intervals.
    plot_cumul ([[d[1][0] for d in data_final_1],[d[1][1] for d in data_final_1]], ['A','B'], 1, [0.68, 0.95])
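The linear model callable is defined elsewhere in Run.py; purely as an illustration of the shape such a function presumably has (the argument order is a guess, matching the (0,0) initial parameter tuple passed to MCMC.MCMC above):

def linear(x, params):
    # y = A*x + B with params = (A, B); a hypothetical stand-in for the real definition.
    A, B = params
    return A * x + B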
Example #3
File: RV.py  Project: AdkPete/ASTP720
def measure():

    ##Measures and returns our R and M estimate

    tr = 2454955.788373

    phi = tr * 2 * np.pi / (3.55)

    t, rvel = read_rv()

    DI = 0.0043
    Rstar = 1.79

    Rp = np.sqrt(Rstar**2 * DI)

    print(Rp)

    x0 = [3.55, 4.34505429e+06, 2.50998989e+02]

    opt = MCMC.MCMC(lsq, [t, rvel])
    opt.M_H(Q, 5000, x0)
    print(opt.MAP, opt.MAPL)
    plot_fit(t, rvel, opt.MAP)

    vp = opt.MAP[2] * u.m / u.s

    Mstar = 1.35 * u.M_sun

    pstar = Mstar * vp
    p_planet = pstar

    T = opt.MAP[0] * u.day
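    # Planet mass from momentum conservation (m_p * v_orb,p = M_star * v_star) combined with
    # Kepler's third law: m_p = [T^2 * p^6 / (4 * pi^2 * (G * M_star)^2)]^(1/6).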
    m_planet = T**2 * (p_planet**6) / (4 * np.pi**2 * (const.G * Mstar)**2)
    m_planet = m_planet**(1.0 / 6)
    print(m_planet.to(u.M_sun))
Example #4
File: test.py  Project: AdkPete/ASTP720
def T1():
    data = np.random.poisson(3, 1000)
    T = MCMC.MCMC(P1, data)

    X, MP, ML = T.M_H(Q1, 10000, np.array([5]))
    mkp1(X)
    print(MP, ML)
Example #5
def generateMCMC(filename):
    a = mcmc.MCMC()
    a.setSteps(100000)
    a.setSigmas(sigma)
    a.current_point = mean
    a.setLogLikelihoodFunction(logp)
    a.loop()
    with open(filename, 'wb') as f:
        pickle.dump(a, f)
    return a
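Because the whole sampler object is pickled, a later session can reload it instead of re-running the chain. A minimal sketch of reading it back (the filename is hypothetical, and which attributes the loaded object exposes depends on this mcmc module):

import pickle

with open('chain.pkl', 'rb') as f:
    sampler = pickle.load(f)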
Example #6
def main():
    while True:
        print("""
        指令列表如下:
        1.查看不同题目作答组合的L曲线
        2.对题目作答进行DSY算法估计
        3.对题目作答进行MCMC算法估计
        4.导入xls类型的作答结果并对其进行分析和处理
        0.退出
        """)
        command = input('请输入需要执行的指令')
        a, b = Init_MIRT.Init_a_b()
        if command == '0':
            print('Thank you for using this program')
            break
        elif command == '1':
            time_star = time.time()
            Show_L.m_show(a, b)
            time_end = time.time()
            print("Run time: %.8s s" % (time_end - time_star))
        elif command == '2':
            time_star = time.time()
            MIRT_DSY.DSY(a, b)
            time_end = time.time()
            print("程序运行时间:%.8s s" % (time_end-time_star))
        elif command == '3':
            time_star = time.time()
            MCMC.MCMC(a,b)
            time_end = time.time()
            print("程序运行时间:%.8s s" % (time_end-time_star))
        elif command == '4':
            print("""
            请输入需要处理的文件位置         
            """)
            while True:
                URL = input()
                if URL == '':
                    print("Please enter a valid file path")
                else:
                    print('Test complete; this feature is still under development')
                    break
        print('finish')
        KEY = input('Press 1 to continue or 0 to stop: ')
        if KEY != '1':
            break
Example #7
File: Run.py  Project: DmitriiGudin/MCMC
def PART_2():
    # Retrieve the data set.
    data_x_2 = Data_part_2.x
    data_y_2 = Data_part_2.y
    # Plot the retrieved data.
    plot_initial_data_2 (data_x_2, data_y_2)
    # Perform the MCMC procedure.
    data_2 = MCMC.MCMC (sinusoidal, (0,0,0), data_x_2, data_y_2, dp_2, dp_decay_2, N_coarse_2, N_fine_2, N_final_2, N_logging)
    # Retrieve the relevant data (the last N_final_2 batches):
    data_final_2 = data_2[N_coarse_2+N_fine_2+1:N_coarse_2+N_fine_2+N_final_2+1]
    # Calculate and output the optimal A, B, C values:
    A_2_opt, B_2_opt, C_2_opt = np.median([d[1][0] for d in data_final_2]), np.median([d[1][1] for d in data_final_2]), np.median([d[1][2] for d in data_final_2])
    print "Optimal sinusoidal-linear parameter values: A = ", A_2_opt, ", B = ", B_2_opt, ", C = ", C_2_opt
    # Calculate the acceptance rates for all 3 modes. Print them out.
    acc_rates = calc_acceptance_rates ([d[2] for d in data_2[1:N_coarse_2+1]], [d[2] for d in data_2[N_coarse_2+1:N_coarse_2+N_fine_2+1]], [d[2] for d in data_final_2])
    print "Acceptance rate for the Coarse mode: ", round(acc_rates[0]*100, 2), "%."
    print "Acceptance rate for the Fine mode: ", round(acc_rates[1]*100, 2), "%."
    print "Acceptance rate for the Final mode: ", round(acc_rates[2]*100, 2), "%."
    # Plot the posterior distribution for A, B and both. Only for the Final mode.
    plot_distr ([[d[1][0] for d in data_final_2], [d[1][1] for d in data_final_2], [d[1][2] for d in data_final_2]], ['A','B','C'], 2)
    # Plot the cumulative sum graph and calculate 68% and 95% confidence intervals.
    plot_cumul ([[d[1][0] for d in data_final_2], [d[1][1] for d in data_final_2], [d[1][2] for d in data_final_2]], ['A','B','C'], 2, [0.68, 0.95])
    # Plot the predicted function over the initial dataset.
    plot_part_2_result (data_x_2, data_y_2, sinusoidal, (A_2_opt, B_2_opt, C_2_opt))
Example #8
File: main.py  Project: AdkPete/ASTP720
            continue

        time.append(float(i.split()[0]))
        flux.append(float(i.split()[1]))

    return time, flux


t, f = read_lightcurve("lightcurve_data.txt")

x0 = [3.5, 2.2, .1, 0.3]  ##A reasonable initial guess

r = [3.546, 2.25, .007,
     .15]  ##This is my current estimate for our best solution

opt = MCMC.MCMC(P, [t, f])

X, M, ML = opt.M_H(
    Q2, 5000, r
)  ##Lots of iterations, so somewhat slow. Good chance to find the right answer.

print(opt.MAP, opt.MAPL)
plot_fit(opt.MAP, [t, f])

p_list = []
di_list = []
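# Discard the first 500 samples as burn-in, then collect the chains for parameters 0 and 2.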
for i in X[500::]:

    p_list.append(i[0])
    di_list.append(i[2])
Example #9
@deterministic(plot=False)
def rate(s=switchPoint, e=early_mean, l=late_mean):
    ''' Concatenate Poisson means '''
    out = np.empty(len(disasters_array))
    out[:s] = e
    out[s:] = l
    return out

disasters = Poisson('disasters', mu=rate, value=disasters_array, observed=True)

from pymc import MCMC
M = MCMC(disasters)
M.sample(iter=10000, burn=1000, thin=10)
M.trace('switchpoint')[:]
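The snippet assumes disasters_array, switchPoint, early_mean, and late_mean were defined earlier in the session. A minimal sketch of such definitions in the spirit of the classic PyMC 2 coal-mining-disasters example (the data below are a random stand-in, not the historical record):

import numpy as np
from pymc import DiscreteUniform, Exponential

disasters_array = np.random.poisson(2, 111)  # stand-in data

switchPoint = DiscreteUniform('switchpoint', lower=0, upper=len(disasters_array) - 1)
early_mean = Exponential('early_mean', beta=1.0)
late_mean = Exponential('late_mean', beta=1.0)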
Example #10
# alpha_doublepl = alpha*(c**(tau-sigma))/tau
# t = (sigma*size/alpha_doublepl)**(1/sigma) # is size the final one or the original one?
# u = TruncPois.tpoissrnd(t*w0)
t = (sigma * size / alpha)**(1 / sigma)
u = TruncPois.tpoissrnd(t * w)
n = Updates.update_n(w, G, p_ij)  # on the old w or the new w?

# output = MCMC("w_gibbs", "exptiltBFRY", iter, sigma=sigma_true, tau=tau_true, alpha=alpha_true, u=u_true,
#               p_ij=p_ij, n=n_true)
output = MCMC("w_gibbs",
              "GGP",
              iter,
              sigma_tau=0.08,
              tau=tau,
              sigma=sigma,
              alpha=alpha,
              u=u,
              n=n,
              p_ij=p_ij,
              c=c,
              w_init=w)
# output = MCMC("w_HMC", "exptiltBFRY", iter, epsilon=epsilon, R=R,
#               tau=tau_true, sigma=sigma_true, alpha=alpha_true, u=u_true, n=n_true, p_ij=p_ij, w_init=w)
output = MCMC("w_HMC",
              "GGP",
              iter,
              epsilon=epsilon,
              R=R,
              tau=tau,
              sigma=sigma,
              alpha=alpha,
def runShatellite(numOfSlices,
                  Acloud,
                  rateDiss,
                  speedCloud,
                  w,
                  ndata,
                  fastFoward,
                  Days,
                  nwalkers,
                  nsamples,
                  nsteps,
                  timespan,
                  phispan,
                  burning,
                  plot=True,
                  mcmc=True):

    # Maximum number of slices is hPerDay (the number of hours in one rotation)
    hPerDay = int((w / (2 * np.pi))**(-1))
    if (numOfSlices > hPerDay):
        print("Cannot have more than 24 number of slices for now")
        return 0, 0
    #Generate the initial condition of the planet
    surf = M_init.initialPlanet(numOfSlices, False)
    clouds = M_init.cloudCoverage(numOfSlices)
    finalTime = []
    apparentTime = []
    l, d = dataAlbedoDynamic(numOfSlices,
                             Days,
                             w,
                             Acloud,
                             surf,
                             clouds,
                             rateDiss,
                             speedCloud,
                             Animation=False)

    print("Got sum fake data. YOHO, SCIENCE BITCH!")

    for i in range(1, Days + 1):
        # Separates the effective albedo and longitude for each day.
        effective = d[(i - 1) * numOfSlices:(i) * (numOfSlices)]
        lon = l[(i - 1) * numOfSlices:(i) * (numOfSlices)]
        #Calculates the apparent albedo with the forward model.
        time, apparent = M_Init.apparentAlbedo(effective,
                                               time_days=timespan,
                                               long_frac=phispan,
                                               n=5000,
                                               plot=False,
                                               alb=True)
        finalTime.append(time + (hPerDay * (i - 1)))
        apparentTime.append(apparent)

    finalTime = np.asarray(finalTime).flatten()
    apparentTime = np.asarray(apparentTime).flatten()
    t, a = extractN(finalTime, apparentTime, ndata * Days)
    print("Done extracting {}".format(numOfSlices))
    #Plotting
    if plot:
        fig, ax = plt.subplots(1,
                               1,
                               gridspec_kw={'height_ratios': [1]},
                               figsize=(10, 8))
        for i in range(Days + 1):
            ax.axvline((i) * hPerDay, color='orange', alpha=1, zorder=10)
        ax.plot(finalTime,
                apparentTime,
                '-',
                color='black',
                linewidth=5,
                label="Simulated curve")
        ax.errorbar(t,
                    a,
                    fmt='.',
                    color='green',
                    yerr=np.asarray(a) * 0.02,
                    markersize=8,
                    solid_capstyle='projecting',
                    capsize=4,
                    label="selected {} data".format(ndata))
        ax.set_xlabel("Time (h)", fontsize=22)
        ax.set_ylabel("Apparent Albedo ($A^*$)", fontsize=22)
        ax.tick_params(labelsize=22)
        ax.legend(fontsize=15)
    chainArray = []
    alb = []
    if (mcmc):
        # Implement the MCMC running stuff in a separate function
        for i in range(1, Days + 1):
            time = t[(i - 1) * ndata:i * ndata]
            app = a[(i - 1) * ndata:i * ndata]
            #Maybe this is wrong, check this, fix this stuff
            lon = np.asarray(l[(i - 1) * numOfSlices:(i) * (numOfSlices)]) % 360
            lon[lon == 0] = 360
            m.MCMC(nwalkers, nsteps, numOfSlices, time, app, lon, timespan,
                   phispan, burning, hPerDay, chainArray, i, ax, plot)
            print("done MCMC for day {}".format(i))
    for chain in chainArray:
        alb.append(m.mcmc_results(chain, burning))
    return alb
Example #12
def main(argv):
    t0 = time.time()
    filename = 'test.pkl'
    nStep = 10000
    nThreads = 1
    dirname = './'
    matrix = ''
    alpha = 0

    try:
        opts, args = getopt.getopt(argv, "f:n:t:a:d:m:")
    except getopt.GetoptError:
        print('usage: test.py -f <picklefile> -n <steps> -t <threads> -a <alpha> -d <dirname> -m <matrix>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-f':
            filename = arg
        elif opt == '-n':
            nStep = int(arg)
        elif opt == '-t':
            nThreads = int(arg)
        elif opt == '-a':
            alpha = float(arg)
        elif opt == '-d':
            dirname = arg

    model = load_model("datasets/R_resolution.csv",
                       "datasets/B_resolution.csv")

    #observed = np.loadtxt("datasets/observed_mock_equal.txt")

    fluxD = 100 + np.array([2 * i for i in range(model.nBinsBT)])
    fluxP = 100 + np.array([2 * i for i in range(model.nBinsBT)])
    # fluxP = 100 + np.zeros(model.nBinsBT)
    flux = np.concatenate([fluxP, fluxD])

    observed = make_mock_observation(fluxP, fluxD)

    def logp(value):
        value = np.array(value)
        if (value < 0).any(): return -np.inf

        # value must be an array twice the size of the binning
        expected = model(*value.reshape((2, model.nBinsBT)))
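        # Poisson log-likelihood of the observed counts, up to an additive constant (the log(k!) term is dropped).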
        log = (observed * np.log(expected) - expected).sum()
        # Didn't figure that out yet
        #firstDerivative = np.diff(np.log(value))
        #secondDerivative = np.fabs(np.diff(firstDerivative))
        #smoothness = -(alpha * secondDerivative).sum()
        return log  #+ smoothness

    sigma = 5

    def proposal_function(previous_point):
        point = np.zeros(len(previous_point))
        #return previous_point+self.sigma*np.random.standard_normal(self.nVar)
        for i in range(len(previous_point)):
            while True:
                val = previous_point[i] + 0.01 * np.random.standard_normal()
                if val > 0:
                    point[i] = val
                    break
        return point

    filename = 'alpha{}_'.format(alpha) + filename

    threads = []

    for i in range(nThreads):
        theFileName = dirname + '/thread{}_'.format(i) + filename
        a = MCMC.MCMC(theFileName,
                      initialCondition=flux[:],
                      realValues=flux[:])
        a.setProposalFunction(proposal_function)
        a.setLogLikelihoodFunction(logp)
        a.setSteps(nStep)
        threads.append(a)

    for t in threads:
        print('launching thread')
        t.start()
        time.sleep(1)

    for t in threads:
        t.join()

    print('done')
    print('time : {}'.format(time.time() - t0))
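Given the getopt string "f:n:t:a:d:m:" above, the script is presumably invoked with flags such as -f <picklefile>, -n <steps>, -t <threads>, -a <alpha>, and -d <output dir>; for example, something like python run_mcmc.py -f test.pkl -n 20000 -t 4 -a 0.5 -d ./results (the script name here is a placeholder, since the listing does not give one).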