Example no. 1
# Imports needed by the test snippets in this listing; the nmf_icm import
# path is an assumption (adjust it to the BNMTF package layout):
import itertools, math, numpy, pytest
from BNMTF.code.nmf_icm import nmf_icm

def test_initialise():
    I,J,K = 5,3,2
    R = numpy.ones((I,J))
    M = numpy.ones((I,J))
    
    lambdaU = 2*numpy.ones((I,K))
    lambdaV = 3*numpy.ones((J,K))
    alpha, beta = 3, 1
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    
    # First do a random initialisation - we can then only check that values lie in the valid (non-negative) range
    init = 'random'
    NMF = nmf_icm(R,M,K,priors)
    NMF.initialise(init)
    
    assert NMF.tau >= 0.0
    for i,k in itertools.product(xrange(0,I),xrange(0,K)):
        assert NMF.U[i,k] >= 0.0
    for j,k in itertools.product(xrange(0,J),xrange(0,K)):
        assert NMF.V[j,k] >= 0.0
    #assert NMF.tau == 3./1.
        
    # Then initialise with expectation values
    init = 'exp'
    NMF = nmf_icm(R,M,K,priors)
    NMF.initialise(init)
    
    assert NMF.tau >= 0.0
    for i,k in itertools.product(xrange(0,I),xrange(0,K)):
        assert NMF.U[i,k] == 1./2.
    for j,k in itertools.product(xrange(0,J),xrange(0,K)):
        assert NMF.V[j,k] == 1./3.
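
The 'exp' branch matches the mean of an Exponential(lambda) prior, E[x] = 1/lambda, which is where the hard-coded 1/2 and 1/3 come from. A minimal sketch of what initialise('exp') presumably does, assumed from the asserted values rather than taken from the library source:

import numpy

# Assumed behaviour of initialise('exp'): every entry is set to the
# exponential prior mean 1/lambda.
lambdaU = 2 * numpy.ones((5, 2))
lambdaV = 3 * numpy.ones((3, 2))
U_init = 1. / lambdaU   # all entries 1/2, as asserted above
V_init = 1. / lambdaV   # all entries 1/3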
Example no. 2
def test_log_likelihood():
    R = numpy.array([[1,2],[3,4]],dtype=float)
    M = numpy.array([[1,1],[0,1]])
    I, J, K = 2, 2, 3
    lambdaU = 2*numpy.ones((I,K))
    lambdaV = 3*numpy.ones((J,K))
    alpha, beta = 3, 1
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    
    BNMF = nmf_icm(R,M,K,priors)
    BNMF.U = numpy.ones((I,K))
    BNMF.V = 2*numpy.ones((J,K))
    BNMF.tau = 3.
    # U * V.T = [[6.,6.],[6.,6.]]
    
    log_likelihood = 3./2.*(math.log(3.)-math.log(2*math.pi)) - 3./2. * (5**2 + 4**2 + 2**2)
    AIC = -2*log_likelihood + 2*(2*3+2*3)
    BIC = -2*log_likelihood + (2*3+2*3)*math.log(3)
    MSE = (5**2+4**2+2**2)/3.
    
    assert log_likelihood == BNMF.quality('loglikelihood')
    assert AIC == BNMF.quality('AIC')
    assert BIC == BNMF.quality('BIC')
    assert MSE == BNMF.quality('MSE')
    with pytest.raises(AssertionError) as error:
        BNMF.quality('FAIL')
    assert str(error.value) == "Unrecognised metric for model quality: FAIL."
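
The hard-coded values follow from a Gaussian likelihood over the |Omega| = 3 observed entries, with U.V^T = 6 everywhere and P = I*K + J*K free parameters. A sketch of the formulas the assertions encode, reconstructed from the numbers rather than from the library source:

import math, numpy

M = numpy.array([[1,1],[0,1]])
R = numpy.array([[1,2],[3,4]], dtype=float)
R_pred = 6. * numpy.ones((2,2))               # U V^T with U = 1, V = 2, K = 3
tau, n_obs, n_params = 3., M.sum(), 2*3 + 2*3

sq_error = (M * (R - R_pred)**2).sum()        # 5^2 + 4^2 + 2^2
log_lik = n_obs/2. * (math.log(tau) - math.log(2*math.pi)) - tau/2. * sq_error
AIC = -2.*log_lik + 2.*n_params
BIC = -2.*log_lik + n_params*math.log(n_obs)
MSE = sq_error / n_obs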
Example no. 3
def test_predict():
    (I,J,K) = (5,3,2)
    U = numpy.array([[125.,126.],[126.,126.],[126.,126.],[126.,126.],[126.,126.]])
    V = numpy.array([[84.,84.],[84.,84.],[84.,84.]])
    taus = [m**2 for m in range(1,10+1)]
    #R_pred = numpy.array([[21084.,21084.,21084.],[ 21168.,21168.,21168.],[21168.,21168.,21168.],[21168.,21168.,21168.],[21168.,21168.,21168.]])
    
    R = numpy.array([[1,2,3],[4,5,6],[7,8,9],[10,11,12],[13,14,15]],dtype=float)
    M = numpy.ones((I,J))
    lambdaU = 2*numpy.ones((I,K))
    lambdaV = 3*numpy.ones((J,K))
    alpha, beta = 3, 1
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    
    M_test = numpy.array([[0,0,1],[0,1,0],[0,0,0],[1,1,0],[0,0,0]]) # R -> 3,5,10,11; R_pred -> 21084,21168,21168,21168
    MSE = (444408561. + 447872569. + 447660964. + 447618649) / 4.
    R2 = 1. - (444408561. + 447872569. + 447660964. + 447618649) / (4.25**2+2.25**2+2.75**2+3.75**2) #mean=7.25
    Rp = 357. / ( math.sqrt(44.75) * math.sqrt(5292.) ) #mean=7.25,var=44.75, mean_pred=21147,var_pred=5292, corr=(-4.25*-63 + -2.25*21 + 2.75*21 + 3.75*21)
    
    NMF = nmf_icm(R,M,K,priors)
    NMF.U = U
    NMF.V = V
    NMF.all_tau = taus
    performances = NMF.predict(M_test)
    
    assert performances['MSE'] == MSE
    assert performances['R^2'] == R2
    assert performances['Rp'] == Rp
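
The four squared errors come from comparing R against R_pred = U.V^T (rows of 21084 and 21168) at the entries selected by M_test; a quick check of that arithmetic:

import numpy

U = numpy.array([[125.,126.],[126.,126.],[126.,126.],[126.,126.],[126.,126.]])
V = numpy.array([[84.,84.],[84.,84.],[84.,84.]])
R = numpy.array([[1,2,3],[4,5,6],[7,8,9],[10,11,12],[13,14,15]], dtype=float)
M_test = numpy.array([[0,0,1],[0,1,0],[0,0,0],[1,1,0],[0,0,0]])

R_pred = numpy.dot(U, V.T)                     # first row 21084, rest 21168
sq_errors = (M_test * (R - R_pred))**2         # only the four test entries
assert sq_errors.sum() == 444408561. + 447872569. + 447660964. + 447618649.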
Example no. 4
def test_tauV():
    NMF = nmf_icm(R,M,K,priors)
    NMF.initialise(init)
    NMF.tau = 3.
    #U^2 = [[1/4,1/4],[1/4,1/4],[1/4,1/4],[1/4,1/4],[1/4,1/4]], sum_i U^2 = [1,1,1] (index=j)
    tauV = 3.*numpy.array([[1.,1.],[1.,1.],[1.,1.]])
    for j,k in itertools.product(xrange(0,J),xrange(0,K)):
        assert NMF.tauV(k)[j] == tauV[j,k]
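
test_tauV above and the three tests that follow reference module-level fixtures (R, M, K, priors, init) that the excerpts do not show. A reconstruction consistent with every hard-coded expectation here, and with test_beta_s and test_alpha_s further down, would be:

import numpy

# Assumed shared fixture, reconstructed from the expected values (the three
# masked entries account for the 2/9 versus 1/3 sums in test_tauU):
I, J, K = 5, 3, 2
R = numpy.ones((I, J))
M = numpy.ones((I, J))
M[0,0], M[2,2], M[3,1] = 0, 0, 0
lambdaU = 2 * numpy.ones((I, K))
lambdaV = 3 * numpy.ones((J, K))
alpha, beta = 3, 1
priors = {'alpha': alpha, 'beta': beta, 'lambdaU': lambdaU, 'lambdaV': lambdaV}
init = 'exp'   # U = 1/2, V = 1/3 after initialisation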
Example no. 5
def test_tauU():
    NMF = nmf_icm(R,M,K,priors)
    NMF.initialise(init)
    NMF.tau = 3.
    #V^2 = [[1/9,1/9],[1/9,1/9],[1/9,1/9]], sum_j V^2 = [2/9,1/3,2/9,2/9,1/3] (index=i)
    tauU = 3.*numpy.array([[2./9.,2./9.],[1./3.,1./3.],[2./9.,2./9.],[2./9.,2./9.],[1./3.,1./3.]])
    for i,k in itertools.product(xrange(0,I),xrange(0,K)):
        assert NMF.tauU(k)[i] == tauU[i,k]
Example no. 6
def test_muV():
    NMF = nmf_icm(R,M,K,priors)
    NMF.initialise(init)
    NMF.tau = 3.
    #U*V^T - Uik*Vjk = [[1/6,..]], so Rij - Ui * Vj + Uik * Vjk = 5/6
    tauV = 3.*numpy.array([[1.,1.],[1.,1.],[1.,1.]])
    muV = 1./tauV * ( 3. * numpy.array([[4.*(5./6.)*(1./2.),4.*(5./6.)*(1./2.)],[4.*(5./6.)*(1./2.),4.*(5./6.)*(1./2.)],[4.*(5./6.)*(1./2.),4.*(5./6.)*(1./2.)]]) - lambdaV )
    for j,k in itertools.product(xrange(0,J),xrange(0,K)):
        assert NMF.muV(tauV[:,k],k)[j] == muV[j,k]
Example no. 7
def test_muU():
    NMF = nmf_icm(R,M,K,priors)
    NMF.initialise(init)
    NMF.tau = 3.
    #U*V^T - Uik*Vjk = [[1/6,..]], so Rij - Ui * Vj + Uik * Vjk = 5/6
    tauU = 3.*numpy.array([[2./9.,2./9.],[1./3.,1./3.],[2./9.,2./9.],[2./9.,2./9.],[1./3.,1./3.]])
    muU = 1./tauU * ( 3. * numpy.array([[2.*(5./6.)*(1./3.),10./18.],[15./18.,15./18.],[10./18.,10./18.],[10./18.,10./18.],[15./18.,15./18.]]) - lambdaU )
    for i,k in itertools.product(xrange(0,I),xrange(0,K)):
        assert abs(NMF.muU(tauU[:,k],k)[i] - muU[i,k]) < 0.000000000000001
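
The expected values in these four tests are consistent with the usual coordinate updates for a Gaussian likelihood with exponential priors. A sketch of the U-side update the assertions imply (the V-side is symmetric; reconstructed from the numbers, not taken from the library source):

import numpy

def tauU_muU(R, M, U, V, tau, lambdaU, k):
    # tauU[i,k] = tau * sum_j M[i,j] * V[j,k]^2
    tauUk = tau * numpy.dot(M, V[:, k]**2)
    # R with factor k's own contribution added back: R - U V^T + U_:k V_:k^T
    R_excl_k = R - numpy.dot(U, V.T) + numpy.outer(U[:, k], V[:, k])
    # muU[i,k] = (tau * sum_j M[i,j] V[j,k] R_excl_k[i,j] - lambdaU[i,k]) / tauU[i,k]
    muUk = (tau * (M * R_excl_k * V[:, k]).sum(axis=1) - lambdaU[:, k]) / tauUk
    return tauUk, muUk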
Example no. 8
def test_run():
    I,J,K = 10,5,2
    R = numpy.ones((I,J))
    M = numpy.ones((I,J))
    M[0,0], M[2,2], M[3,1] = 0, 0, 0
    
    lambdaU = 2*numpy.ones((I,K))
    lambdaV = 3*numpy.ones((J,K))
    alpha, beta = 3, 1
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    init = 'exp' #U=1/2,V=1/3
    
    iterations = 15
    
    NMF = nmf_icm(R,M,K,priors)
    NMF.initialise(init)
    NMF.run(iterations)
    
    assert NMF.all_tau.shape == (iterations,)
    assert NMF.all_tau[1] != alpha/float(beta)
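
The second assertion only passes if tau is updated away from its prior mean alpha/beta during the run. Under ICM, each iteration presumably sets tau to the mode of its Gamma conditional; a sketch of that update, an assumption consistent with the test rather than the library's actual code:

import numpy

def update_tau(R, M, U, V, alpha, beta):
    # Point update for the noise precision: mode of Gamma(alpha_s, beta_s).
    alpha_s = alpha + M.sum() / 2.
    beta_s = beta + 0.5 * (M * (R - numpy.dot(U, V.T))**2).sum()
    return (alpha_s - 1.) / beta_s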
Example no. 9
def test_compute_statistics():
    R = numpy.array([[1,2],[3,4]],dtype=float)
    M = numpy.array([[1,1],[0,1]])
    I, J, K = 2, 2, 3
    lambdaU = 2*numpy.ones((I,K))
    lambdaV = 3*numpy.ones((J,K))
    alpha, beta = 3, 1
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    
    BNMF = nmf_icm(R,M,K,priors)
    
    R_pred = numpy.array([[500,550],[1220,1342]],dtype=float)
    M_pred = numpy.array([[0,0],[1,1]])
    
    MSE_pred = (1217**2 + 1338**2) / 2.0
    R2_pred = 1. - (1217**2+1338**2)/(0.5**2+0.5**2) #mean=3.5
    Rp_pred = 61. / ( math.sqrt(.5) * math.sqrt(7442.) ) #mean=3.5,var=0.5,mean_pred=1281,var_pred=7442,cov=61
    
    assert MSE_pred == BNMF.compute_MSE(M_pred,R,R_pred)
    assert R2_pred == BNMF.compute_R2(M_pred,R,R_pred)
    assert Rp_pred == BNMF.compute_Rp(M_pred,R,R_pred)
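
All three statistics are the standard definitions restricted to the entries where M_pred is 1. A sketch that reproduces the hand-computed values above (function and variable names are assumptions, not the library's actual API):

import numpy

def compute_metrics(M, R, R_pred):
    # MSE, R^2 and Pearson correlation over the observed test entries.
    x, y = R[M == 1], R_pred[M == 1]
    MSE = ((x - y)**2).mean()
    R2 = 1. - ((x - y)**2).sum() / ((x - x.mean())**2).sum()
    Rp = (((x - x.mean()) * (y - y.mean())).sum()
          / numpy.sqrt(((x - x.mean())**2).sum() * ((y - y.mean())**2).sum()))
    return MSE, R2, Rp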
Example no. 10
    all_R.append(R)

# We now run the VB algorithm on each of the M's for each noise ratio
all_performances = {metric: [] for metric in metrics}
average_performances = {metric: []
                        for metric in metrics}  # averaged over repeats
for (noise, R, Ms, Ms_test) in zip(noise_ratios, all_R, all_Ms, all_Ms_test):
    print "Trying noise ratio %s." % noise

    # Run the algorithm <repeats> times and store all the performances
    for metric in metrics:
        all_performances[metric].append([])
    for (repeat, M, M_test) in zip(range(0, repeats), Ms, Ms_test):
        print "Repeat %s of noise ratio %s." % (repeat + 1, noise)

        BNMF = nmf_icm(R, M, K, priors)
        BNMF.initialise(init_UV)
        BNMF.run(iterations, minimum_TN=minimum_TN)

        # Measure the performances
        performances = BNMF.predict(M_test)
        for metric in metrics:
            # Add this metric's performance to the list of <repeat> performances for this noise ratio
            all_performances[metric][-1].append(performances[metric])

    # Compute the average across attempts
    for metric in metrics:
        average_performances[metric].append(
            sum(all_performances[metric][-1]) / repeats)

Example no. 11
# Load in data
R = numpy.loadtxt(input_folder + "R.txt")
M = numpy.ones((I, J))

# Run the VB algorithm, <repeats> times
times_repeats = []
performances_repeats = []
for i in range(0, repeats):
    # Set all the seeds
    numpy.random.seed(0)
    random.seed(0)
    scipy.random.seed(0)

    # Run the classifier
    nmf = nmf_icm(R, M, K, priors)
    nmf.initialise(init_UV)
    nmf.run(iterations, minimum_TN=minimum_TN)

    # Extract the performances and timestamps across all iterations
    times_repeats.append(nmf.all_times)
    performances_repeats.append(nmf.all_performances)

# Check whether seed worked: all performances should be the same
assert all([numpy.array_equal(performances, performances_repeats[0]) for performances in performances_repeats]), \
    "Seed went wrong - performances not the same across repeats!"

# Print out the performances, and the average times
icm_all_times_average = list(numpy.average(times_repeats, axis=0))
icm_all_performances = performances_repeats[0]
print "icm_all_times_average = %s" % icm_all_times_average
Example no. 12
from BNMTF.drug_sensitivity.experiments_gdsc.load_data import load_Sanger

import numpy, matplotlib.pyplot as plt

##########

standardised = False #standardised Sanger or unstandardised

iterations = 1000
init_UV = 'random'
I, J, K = 622,138,10

alpha, beta = 1., 1.
lambdaU = numpy.ones((I,K))/10.
lambdaV = numpy.ones((J,K))/10.
priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }

# Load in data
(_,X_min,M,_,_,_,_) = load_Sanger(standardised=standardised)

# Run the algorithm
NMF = nmf_icm(X_min,M,K,priors)
NMF.initialise(init_UV)
NMF.run(iterations)

# Plot the tau expectation values to check convergence
plt.plot(NMF.all_tau)

# Print the performances across iterations (MSE)
print "all_performances = %s" % NMF.all_performances['MSE']
Example no. 13
def test_init():
    # Test getting an exception when R and M are different sizes, and when R is not a 2D array.
    R1 = numpy.ones(3)
    M = numpy.ones((2,3))
    I,J,K = 5,3,1
    lambdaU = numpy.ones((I,K))
    lambdaV = numpy.ones((J,K))
    alpha, beta = 3, 1    
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    
    with pytest.raises(AssertionError) as error:
        nmf_icm(R1,M,K,priors)
    assert str(error.value) == "Input matrix R is not a two-dimensional array, but instead 1-dimensional."
    
    R2 = numpy.ones((4,3,2))
    with pytest.raises(AssertionError) as error:
        nmf_icm(R2,M,K,priors)
    assert str(error.value) == "Input matrix R is not a two-dimensional array, but instead 3-dimensional."
    
    R3 = numpy.ones((3,2))
    with pytest.raises(AssertionError) as error:
        nmf_icm(R3,M,K,priors)
    assert str(error.value) == "Input matrix R is not of the same size as the indicator matrix M: (3, 2) and (2, 3) respectively."
    
    # Similarly for lambdaU, lambdaV
    R4 = numpy.ones((2,3))
    lambdaU = numpy.ones((2+1,1))
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    with pytest.raises(AssertionError) as error:
        nmf_icm(R4,M,K,priors)
    assert str(error.value) == "Prior matrix lambdaU has the wrong shape: (3, 1) instead of (2, 1)."
    
    lambdaU = numpy.ones((2,1))
    lambdaV = numpy.ones((3+1,1))
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    with pytest.raises(AssertionError) as error:
        nmf_icm(R4,M,K,priors)
    assert str(error.value) == "Prior matrix lambdaV has the wrong shape: (4, 1) instead of (3, 1)."
    
    # Test getting an exception if a row or column is entirely unknown
    lambdaU = numpy.ones((2,1))
    lambdaV = numpy.ones((3,1))
    M1 = [[1,1,1],[0,0,0]]
    M2 = [[1,1,0],[1,0,0]]
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    
    with pytest.raises(AssertionError) as error:
        nmf_icm(R4,M1,K,priors)
    assert str(error.value) == "Fully unobserved row in R, row 1."
    with pytest.raises(AssertionError) as error:
        nmf_icm(R4,M2,K,priors)
    assert str(error.value) == "Fully unobserved column in R, column 2."
    
    # Finally, a successful case
    I,J,K = 3,2,2
    R5 = 2*numpy.ones((I,J))
    lambdaU = numpy.ones((I,K))
    lambdaV = numpy.ones((J,K))
    M = numpy.ones((I,J))
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    NMF = nmf_icm(R5,M,K,priors)
    
    assert numpy.array_equal(NMF.R,R5)
    assert numpy.array_equal(NMF.M,M)
    assert NMF.I == I
    assert NMF.J == J
    assert NMF.K == K
    assert NMF.size_Omega == I*J
    assert NMF.alpha == alpha
    assert NMF.beta == beta
    assert numpy.array_equal(NMF.lambdaU,lambdaU)
    assert numpy.array_equal(NMF.lambdaV,lambdaV)
    
    # And when lambdaU and lambdaV are integers    
    I,J,K = 3,2,2
    R5 = 2*numpy.ones((I,J))
    lambdaU = 3.
    lambdaV = 4.
    M = numpy.ones((I,J))
    priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }
    NMF = nmf_icm(R5,M,K,priors)
    
    assert numpy.array_equal(NMF.R,R5)
    assert numpy.array_equal(NMF.M,M)
    assert NMF.I == I
    assert NMF.J == J
    assert NMF.K == K
    assert NMF.size_Omega == I*J
    assert NMF.alpha == alpha
    assert NMF.beta == beta
    assert numpy.array_equal(NMF.lambdaU,lambdaU*numpy.ones((I,K)))
    assert numpy.array_equal(NMF.lambdaV,lambdaV*numpy.ones((J,K)))
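
A sketch of the constructor checks this test exercises, with the message strings taken from the assertions above (the real __init__ may differ, and the scalar-prior conversion from the final block is omitted here):

import numpy

def check_inputs(R, M, lambdaU, lambdaV, K):
    R, M = numpy.array(R, dtype=float), numpy.array(M)
    assert R.ndim == 2, "Input matrix R is not a two-dimensional array, " \
        "but instead %s-dimensional." % R.ndim
    assert R.shape == M.shape, "Input matrix R is not of the same size as " \
        "the indicator matrix M: %s and %s respectively." % (R.shape, M.shape)
    I, J = R.shape
    assert numpy.shape(lambdaU) == (I, K), "Prior matrix lambdaU has the " \
        "wrong shape: %s instead of %s." % (numpy.shape(lambdaU), (I, K))
    assert numpy.shape(lambdaV) == (J, K), "Prior matrix lambdaV has the " \
        "wrong shape: %s instead of %s." % (numpy.shape(lambdaV), (J, K))
    for i in range(I):
        assert M[i].sum() > 0, "Fully unobserved row in R, row %s." % i
    for j in range(J):
        assert M[:, j].sum() > 0, "Fully unobserved column in R, column %s." % j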
Example no. 14
def test_beta_s():
    NMF = nmf_icm(R,M,K,priors)
    NMF.initialise(init)
    beta_s = beta + .5*(12*(2./3.)**2) #U*V.T = [[1/6+1/6,..]]
    assert abs(NMF.beta_s() - beta_s) < 0.000000000000001
Example no. 15
def test_alpha_s():
    NMF = nmf_icm(R,M,K,priors)
    NMF.initialise(init)
    alpha_s = alpha + 6.
    assert NMF.alpha_s() == alpha_s
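
Both expectations follow from the Gamma conditional for tau, using the fixture reconstructed earlier: 12 observed entries, and R - U.V^T = 1 - 1/3 = 2/3 after 'exp' initialisation. A quick check of the hard-coded numbers (same assumed formulas as in update_tau above):

n_obs = 12
alpha_s = 3 + n_obs / 2.                   # alpha + 6 = 9
beta_s = 1 + 0.5 * n_obs * (2. / 3.)**2    # beta + .5*(12*(2/3)^2)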
Example no. 16
    all_R.append(R)


# We now run the VB algorithm on each of the M's for each noise ratio
all_performances = {metric: [] for metric in metrics}
average_performances = {metric: [] for metric in metrics}  # averaged over repeats
for (noise, R, Ms, Ms_test) in zip(noise_ratios, all_R, all_Ms, all_Ms_test):
    print "Trying noise ratio %s." % noise

    # Run the algorithm <repeats> times and store all the performances
    for metric in metrics:
        all_performances[metric].append([])
    for (repeat, M, M_test) in zip(range(0, repeats), Ms, Ms_test):
        print "Repeat %s of noise ratio %s." % (repeat + 1, noise)

        BNMF = nmf_icm(R, M, K, priors)
        BNMF.initialise(init_UV)
        BNMF.run(iterations, minimum_TN=minimum_TN)

        # Measure the performances
        performances = BNMF.predict(M_test)
        for metric in metrics:
            # Add this metric's performance to the list of <repeat> performances for this noise ratio
            all_performances[metric][-1].append(performances[metric])

    # Compute the average across attempts
    for metric in metrics:
        average_performances[metric].append(sum(all_performances[metric][-1]) / repeats)


print "repeats=%s \nnoise_ratios = %s \nall_performances = %s \naverage_performances = %s" % (
Example no. 17
lambdaV = numpy.ones((J,K))/10.
priors = { 'alpha':alpha, 'beta':beta, 'lambdaU':lambdaU, 'lambdaV':lambdaV }

# Load in data
(_,R,M,_,_,_,_) = load_gdsc(standardised=standardised)


# Run the VB algorithm, <repeats> times
times_repeats = []
performances_repeats = []
for i in range(0,repeats):
    # Set all the seeds
    numpy.random.seed(0)
    
    # Run the classifier
    nmf = nmf_icm(R,M,K,priors) 
    nmf.initialise(init_UV)
    nmf.run(iterations,minimum_TN=minimum_TN)

    # Extract the performances and timestamps across all iterations
    times_repeats.append(nmf.all_times)
    performances_repeats.append(nmf.all_performances)

# Check whether seed worked: all performances should be the same
assert all(numpy.array_equal(performances, performances_repeats[0]) for performances in performances_repeats), \
    "Seed went wrong - performances not the same across repeats!"

# Print out the performances, and the average times
icm_all_times_average = list(numpy.average(times_repeats, axis=0))
icm_all_performances = performances_repeats[0]
print "icm_all_times_average = %s" % icm_all_times_average
Example no. 18
##########

standardised = False  #standardised Sanger or unstandardised
no_folds = 5

iterations = 1000
init_UV = 'random'
I, J, K = 622, 139, 10

alpha, beta = 1., 1.
lambdaU = numpy.ones((I, K)) / 10.
lambdaV = numpy.ones((J, K)) / 10.
priors = {'alpha': alpha, 'beta': beta, 'lambdaU': lambdaU, 'lambdaV': lambdaV}

# Load in data
(_, X_min, M, _, _, _, _) = load_Sanger(standardised=standardised)

# Run the algorithm
NMF = nmf_icm(X_min, M, K, priors)
NMF.initialise(init_UV)
NMF.run(iterations)

# Plot the tau expectation values to check convergence
plt.plot(NMF.all_tau)

# Print the performances across iterations (MSE)
print "all_performances = %s" % NMF.all_performances['MSE']
'''
all_performances = [68.185592823781633, 3.7406955878671249, 3.0952956497536146, 3.0093196335883539, 2.9825243533200183, 2.9643577550979545, 2.9459850054262682, 2.9241393181988635, 2.8970426441581938, 2.8647979550112606, 2.8287634364681966, 2.7912521958298973, 2.7539589370082029, 2.7186618043440998, 2.6869029979300709, 2.6594967226303559, 2.6369317554566618, 2.6184327951545603, 2.6031048058598536, 2.5902861012156722, 2.5791468430768449, 2.5689608242742055, 2.5594562573764947, 2.5501146959110974, 2.5406938709182296, 2.5310858259397389, 2.5212931254248749, 2.5114342295654812, 2.501580454868308, 2.4918258599447105, 2.4823868778575466, 2.473389145201657, 2.4648198771053091, 2.4566954595658497, 2.4490573255748709, 2.4419128535976755, 2.4352541036106987, 2.4290595079091366, 2.423309384201072, 2.4179947009310823, 2.413125243804942, 2.4086708937924723, 2.4045824571856476, 2.4008310567294608, 2.3973987473688405, 2.3942619257426667, 2.3913947844606285, 2.3887730250321679, 2.3863758273338123, 2.3841821148521491, 2.3821729492986905, 2.3803319752939975, 2.3786424906745331, 2.3770903694463974, 2.3756631413950404, 2.3743492943672386, 2.3731378043830453, 2.3720188015936219, 2.3709832996060909, 2.3700230111941956, 2.3691315032908253, 2.3683017043500656, 2.3675277082279846, 2.3668042674763359, 2.3661270381738473, 2.3654912722206016, 2.3648933277639155, 2.3643297396949499, 2.363797433754133, 2.3632936832349389, 2.3628158787698936, 2.3623617246787396, 2.3619291781878138, 2.3615164051916095, 2.3611217071300179, 2.3607435753706811, 2.3603805752857974, 2.3600313505773234, 2.3596962425153816, 2.3593736785402979, 2.3590620435215834, 2.3587604673908604, 2.3584682646619806, 2.3581845969243349, 2.3579089082875968, 2.3576416872171255, 2.357384592934475, 2.357133989607056, 2.3568895588660266, 2.3566508597776403, 2.3564175558822962, 2.3561892475760655, 2.3559655603199152, 2.3557472803194157, 2.3555334155659344, 2.3553240530988133, 2.3551189747246735, 2.3549175403270599, 2.354719525984895, 2.3545248368650409, 2.354333345859323, 2.3541458581277532, 2.3539613449644614, 2.3537796491447001, 2.3536006476426619, 2.3534242353593089, 2.3532502868061695, 2.3530787015877119, 2.3529093936664411, 2.352742280687484, 2.3525772229628541, 2.3524141778261183, 2.3522531008955339, 2.3520939082027312, 2.3519366335083856, 2.3517811826482768, 2.3516274777028405, 2.3514754997988558, 2.3513251800009853, 2.3511764616123636, 2.3510296525068131, 2.3508846226115212, 2.350741187903254, 2.3505991148585461, 2.350458358168134, 2.3503188726564197, 2.3501817135770615, 2.3500472806774346, 2.3499140715719316, 2.3497820353773751, 2.3496511290707636, 2.3495212961114524, 2.3493924889172613, 2.3492646775894541, 2.3491378325683705, 2.3490119276624468, 2.3488869842755751, 2.3487629302374322, 2.3486406005340004, 2.3485193757302261, 2.3483989785412271, 2.3482793457785145, 2.3481604473981261, 2.348042308080764, 2.3479248670017578, 2.3478082389914148, 2.347692562222214, 2.3475783267042916, 2.3474654579935836, 2.3473531497949804, 2.3472415663282691, 2.347131949039142, 2.3470234786776216, 2.3469156232810224, 2.3468083926276502, 2.3467023482557305, 2.346597906774631, 2.3464939397613298, 2.3463904240857736, 2.3462873436981293, 2.3461846995754199, 2.3460826253685081, 2.345981834540658, 2.3458812550271824, 2.3457810579756666, 2.3456812401083056, 2.3455817932289036, 2.3454828986253222, 2.3453843718417602, 2.3452861893881773, 2.3451883470240946, 2.3450908646202975, 2.3449937010484785, 2.3448968328025508, 2.3448002492841566, 2.3447039449136904, 2.3446079034481091, 
2.3445121068578967, 2.3444165431569561, 2.3443213703988812, 2.344227346625674, 2.3441335456899233, 2.3440399536649186, 2.34394656160296, 2.343853410309841, 2.3437604640151304, 2.3436676941346879, 2.3435750898562864, 2.343482663322209, 2.3433904073837266, 2.3432982979911277, 2.3432063282456195, 2.3431149936816369, 2.3430241214902479, 2.3429333798909102, 2.3428427591589998, 2.3427522518493435, 2.3426618511665818, 2.3425715505487927, 2.3424813390681853, 2.3423912267661029, 2.34230132143349, 2.3422114880962162, 2.3421216977403803, 2.3420320033018154, 2.3419424683656067, 2.3418529642746342, 2.3417635122750169, 2.341674094816538, 2.3415846939038181, 2.3414953047189648, 2.3414059166045638, 2.3413168805887401, 2.3412280427223431, 2.3411391977537965, 2.3410503389474564, 2.3409614639881404, 2.3408725685700453, 2.3407838359672737, 2.3406951905807674, 2.3406065063806727, 2.3405177761059304, 2.3404289929122966, 2.340340150798208, 2.3402512213232178, 2.3401622235543078, 2.3400731535636212, 2.3399840072670308, 2.3398947804514343, 2.3398054688565426, 2.3397160692171597, 2.3396265786134895, 2.3395369933261341, 2.3394473099658804, 2.3393575252353886, 2.3392676359241813, 2.3391776389027594, 2.3390875311173569, 2.3389973630010683, 2.3389074354072861, 2.3388173778681254, 2.3387270759330261, 2.3386367334877649, 2.3385468323659082, 2.3384569040103793, 2.3383669004588929, 2.3382768131954599, 2.3381866351413572, 2.338096360376793, 2.3380059836437881, 2.3379154936174653, 2.3378248548734568, 2.3377340982048911, 2.3376432223258155, 2.3375522251189946, 2.3374612459886324, 2.3373700919463216, 2.3372789713244488, 2.3371878560277537, 2.3370967222129053, 2.3370055758561064, 2.336914407074357, 2.3368232060958483, 2.3367319633392913, 2.3366407482097991, 2.3365496636512577, 2.3364585069093717, 2.3363672730103171, 2.3362759532436237, 2.336184539089472, 2.3360930208342516, 2.3360013872842353, 2.3359096197361562, 2.335817717507684, 2.3357256760812288, 2.3356334871401669, 2.3355411673096569, 2.3354488111422387, 2.3353562853033014, 2.3352635853141095, 2.3351707030693634, 2.3350776286157551, 2.3349843529582071, 2.3348908679747034, 2.3347971572826962, 2.3347032125857403, 2.3346090359207317, 2.3345146217275903, 2.3344199639953422, 2.334325056766847, 2.3342298942047037, 2.3341344705883706, 2.3340387804778433, 2.333942819358839, 2.3338465804865307, 2.3337500579017245, 2.3336532441584703, 2.3335563261538024, 2.3334598311889287, 2.3333630326249928, 2.333265924632927, 2.333168501888863, 2.3330707589009658, 2.3329726902205223, 2.3328742905056141, 2.3327755545117337, 2.3326764770154944, 2.3325770530781873, 2.3324773100232163, 2.3323772480755585, 2.3322768255639907, 2.3321760363711261, 2.3320748772337709, 2.3319733481012181, 2.3318714401924723, 2.3317691471537167, 2.3316666237023025, 2.3315637757792382, 2.3314605274779976, 2.331356873418764, 2.3312528078567722, 2.3311483253641869, 2.3310434205360897, 2.3309384140786089, 2.3308338977608551, 2.3307289531430282, 2.3306235732459486, 2.330517827777189, 2.3304116346346513, 2.3303049886995466, 2.330197884774921, 2.3300903174353462, 2.3299822812272533, 2.3298737706518993, 2.3297647801780919, 2.3296553042454189, 2.3295453372654169, 2.3294348736223678, 2.3293239076742398, 2.3292124337537738, 2.3291004461695808, 2.328987939118019, 2.3288749065201895, 2.3287613425982889, 2.3286472414729014, 2.3285325970782589, 2.3284173781707476, 2.3283015560679634, 2.3281851889725069, 2.3280682685356715, 2.3279507873743843, 2.3278327383458559, 2.3277141146954934, 2.3275949097395729, 2.3274751166677081, 
2.327354728738368, 2.3272338981433456, 2.3271125105834614, 2.3269905056007727, 2.3268678771399349, 2.3267446191084638, 2.3266207253073778, 2.3264961894566052, 2.3263710052163917, 2.3262451662019918, 2.3261186659943132, 2.3259916430514846, 2.3258644707438902, 2.3257366888020963, 2.3256082691466511, 2.325479160513618, 2.325349365066065, 2.3252188777781098, 2.3250876915271199, 2.3249557989006071, 2.3248231923962699, 2.3246899026304271, 2.3245559104094631, 2.3244211877832406, 2.3242857280060019, 2.3241495237075953, 2.3240125674529155, 2.3238748518877794, 2.3237363697580991, 2.3235971138974798, 2.3234570772107275, 2.3233162526600073, 2.3231746332544674, 2.3230322118582758, 2.3228889814459013, 2.322744935122933, 2.3226000660212485, 2.3224543672966065, 2.3223078321269495, 2.3221604537110676, 2.3220122244621164, 2.3218631352751062, 2.3217131775137201, 2.321562344450872, 2.321410629573827, 2.3212580263518481, 2.3211045282419027, 2.3209501286998924, 2.3207948096296578, 2.3206385720243334, 2.3204814126932751, 2.3203233253333386, 2.3201642964298514, 2.3200043255359328, 2.3198434084856765, 2.3196815387539362, 2.3195187087062927, 2.319354907593921, 2.3191901348440913, 2.3190243839466524, 2.3188578355286156, 2.3186903928528326, 2.3185219561902977, 2.3183525195634282, 2.3181820769834935, 2.3180106237694371, 2.3178381568266135, 2.3176646444799474, 2.3174900736660442, 2.317314445787015, 2.3171377540856319, 2.3169599926839095, 2.3167811559609626, 2.3166012384192025, 2.3164206550398685, 2.3162393410883668, 2.3160569556229449, 2.3158734888715098, 2.3156893008055315, 2.3155041402995211, 2.3153178864787662, 2.3151305351486124, 2.3149420812992036, 2.3147525196941796, 2.3145618459633184, 2.3143700642390028, 2.3141771625484711, 2.3139831332567735, 2.3137879701380011, 2.3135916675495882, 2.3133941800434599, 2.3131954676592539, 2.3129955875649957, 2.3127945366554647, 2.3125923123645284, 2.3123889119161314, 2.3121843322688966, 2.3119785698908784, 2.3117716212098705, 2.3115634823543991, 2.3113541501443851, 2.3111436216234598, 2.3109318937525511, 2.3107189627517091, 2.3105048249664164, 2.3102902981849449, 2.3100745667727871, 2.3098576169165774, 2.3096394479492424, 2.3094200573888757, 2.3091994417553514, 2.3089775973084183, 2.3087545201838662, 2.308530206506894, 2.308304652428129, 2.3080778540047593, 2.3078498071759102, 2.3076205079463166, 2.3073899535030886, 2.3071581416053069, 2.3069250608104088, 2.3066906704040013, 2.306455007486552, 2.3062180692687795, 2.305979853594514, 2.3057403586161311, 2.3054995827835523, 2.3052575246555249, 2.3050141999372271, 2.3047696077066595, 2.304523734722228, 2.3042765700296859, 2.30402811418737, 2.3037783711801838, 2.3035273386979518, 2.3032750142809282, 2.3030213965071065, 2.3027664843201854, 2.3025102768622174, 2.3022527734374569, 2.3019939730829155, 2.3017338753034475, 2.3014724798773258, 2.3012104339692243, 2.3009472113254628, 2.3006826874371713, 2.300416862929819, 2.3001497383945662, 2.2998813148570898, 2.299611593368446, 2.299340576523432, 2.2990682664103428, 2.2987946651118008, 2.2985198396539541, 2.298243761110554, 2.2979664002082507, 2.2976877598937437, 2.2974078432657734, 2.2971266535181396, 2.2968441931766508, 2.2965604652172558, 2.2962754732659181, 2.2959892212939392, 2.295701713480955, 2.2954129539264274, 2.2951229467276564, 2.2948316963927082, 2.2945392070809016, 2.2942454832924093, 2.2939505283791903, 2.2936543456336516, 2.2933569416910351, 2.293058323851688, 2.2927584994065313, 2.2924607735406859, 2.2921634674031965, 2.2918649785554615, 2.2915653182835047, 
2.2912644971750904, 2.290962524033306, 2.2906594068290071, 2.2903551532318609, 2.2900497708441945, 2.2897432672639382, 2.2894356501537483, 2.2891269273786783, 2.288817105803965, 2.2885061936205853, 2.2881941992517874, 2.2878811305713773, 2.287567073181588, 2.2872520143688222, 2.2869359290872091, 2.286618815274625, 2.2863006781698192, 2.28598152591508, 2.2856613680287188, 2.2853402148343407, 2.2850183501544143, 2.2846959636230277, 2.2843728161144972, 2.28404883707455, 2.2837239986523064, 2.283398291416622, 2.2830717164394287, 2.2827442793574861, 2.2824159882923767, 2.2820868526598472, 2.2817568830959787, 2.2814260908027504, 2.2810944866945602, 2.2807620827280983, 2.280429602359058, 2.2800963657605329, 2.2797623501097148, 2.2794275713716665, 2.2790920439255711, 2.2787557817349411, 2.2784187983962929, 2.2780811072189451, 2.2777438869524906, 2.2774060913455569, 2.277067608165352, 2.2767289693444233, 2.2763900249200204, 2.2760504284487539, 2.2757101895021346, 2.2753693231613776, 2.2750278430439934, 2.2746857781975058, 2.2743431488210848, 2.2739999652665879, 2.273656242443522, 2.2733119966061071, 2.2729672443202498, 2.2726220006839957, 2.2722762819044298, 2.2719301050141034, 2.2715834855583488, 2.2712364402560676, 2.2708889864954731, 2.2705411418580574, 2.2701929239358392, 2.2698443505705446, 2.2694954398407123, 2.2691462100039996, 2.268796679486099, 2.2684468668763293, 2.2680967909946301, 2.2677464701588241, 2.2673959232269438, 2.2670451693991209, 2.266694228097609, 2.2663431188978103, 2.2659918615293373, 2.2656404758614963, 2.2652889818913846, 2.2649373997337485, 2.264585749610498, 2.264234051841814, 2.2638823268370607, 2.2635305950862152, 2.263178877070875, 2.2628271932885302, 2.2624755644019592, 2.2621240111251679, 2.2617725542146738, 2.2614212144619601, 2.2610700126856784, 2.2607189697239809, 2.2603681064271473, 2.2600174436499199, 2.2596670022439933, 2.2593168030504063, 2.2589668668967438, 2.2586172145970251, 2.2582678661835556, 2.2579188407070117, 2.2575701585089418, 2.2572219412062315, 2.2568741408543502, 2.2565267448277666, 2.2561797730729474, 2.255833310195599, 2.2554874239692118, 2.2551420230070627, 2.2547971264086137, 2.2544527536753907, 2.2541089241263168, 2.2537656569574449, 2.2534229712842335, 2.2530808860581906, 2.2527394200587278, 2.2523985918860441, 2.2520584199551128, 2.2517189224899643, 2.2513801175176149, 2.2510420228627219, 2.25070465591152, 2.2503680331876263, 2.2500321717617697, 2.2496970884901462, 2.2493627999977108, 2.2490293220349367, 2.2486966689273422, 2.2483648555705651, 2.2480338964624682, 2.2477038067524155, 2.2473745993780523, 2.2470462878852877, 2.2467188852212043, 2.246392403001932, 2.24606685371573, 2.2457422496170265, 2.2454186015322617, 2.2450962844513951, 2.2447752832106542, 2.2444552660224764, 2.2441362422452298, 2.2438182220835281, 2.2435012159130685, 2.2431852339812393, 2.2428702862738401, 2.242556382488055, 2.2422435319930742, 2.2419317438291113, 2.241621026501877, 2.24131138710157, 2.2410028331554304, 2.2406953709431918, 2.2403899269586258, 2.2400865377910639, 2.2397841984004794, 2.2394829227352586, 2.2391827204670069, 2.2388835986288482, 2.2385855628917337, 2.238288618150365, 2.2379927687657055, 2.2376980185919044, 2.2374043708183149, 2.2371118285712455, 2.236820394665799, 2.2365300716107046, 2.236240861619148, 2.235952765675608, 2.2356657853008857, 2.2353799217636974, 2.2350951760703412, 2.2348115489693336, 2.2345290409567622, 2.2342476502033763, 2.2339673753272891, 2.2336882161316827, 2.2334101724783206, 2.2331332438418547, 2.2328574293740195, 
2.232582728195823, 2.2323091372241484, 2.2320366527442346, 2.2317652712874017, 2.2314949897668424, 2.2312258788687047, 2.2309584245434864, 2.2306920639915204, 2.2304267916585649, 2.230162603161626, 2.2298994940376624, 2.2296374595416029, 2.2293764950997201, 2.2291165959750634, 2.2288577572380333, 2.2285999716546065, 2.2283432329652939, 2.2280875349359426, 2.2278328712548321, 2.2275792355170796, 2.2273266203812798, 2.2270750183802477, 2.2268244219085473, 2.2265748236565774, 2.2263262164612678, 2.2260785930711959, 2.225831945321588, 2.2255862650953553, 2.2253415447246119, 2.2250977765038269, 2.2248549526603476, 2.2246130653585237, 2.224372106705371, 2.2241320687571182, 2.2238929536972498, 2.2236547470832342, 2.2234174301487872, 2.2231809888743421, 2.222945412725589, 2.2227106922393727, 2.2224768182312609, 2.2222437816403642, 2.2220115725717866, 2.221780180973246, 2.2215495975705695, 2.2213200837295366, 2.2210915545096022, 2.2208638069328215, 2.2206368307609661, 2.220410616057213, 2.2201851534741635, 2.2199604338167123, 2.2197364479362607, 2.2195131867433862, 2.2192906412128952, 2.2190688092881441, 2.2188476757041697, 2.2186272290666, 2.2184074620246035, 2.2181883704702172, 2.2179699398632073, 2.2177521588804074, 2.2175350167301437, 2.2173185033056009, 2.2171026090113148, 2.2168873242204774, 2.2166726396599512, 2.2164585462722357, 2.2162450351540048, 2.2160320975473566, 2.2158197248356664, 2.2156079085421845, 2.2153966403296881, 2.2151859120018398, 2.2149757155048411, 2.2147660411034331, 2.214556880400381, 2.2143482344735288, 2.2141400901485846, 2.2139324366218829, 2.2137252650299368, 2.2135185675940265, 2.2133123366936895, 2.2131065689161762, 2.2129012536957537, 2.2126963814700042, 2.2124919436448907, 2.2122879329667766, 2.2120843426284567, 2.2118811661493614, 2.2116783973512071, 2.211476030235656, 2.2112740590162403, 2.2110724781104549, 2.2108712821327585, 2.2106704658906859, 2.2104700242102147, 2.210269940168057, 2.2100702189254644, 2.2098708575236232, 2.2096718509290918, 2.2094731932481335, 2.2092748710950696, 2.2090768907154033, 2.2088792479064319, 2.2086819390550687, 2.2084849607687285, 2.2082885138361705, 2.2080925251799903, 2.2078968578409919, 2.2077015084807456, 2.2075064744469115, 2.2073117533321733, 2.2071173429127859, 2.2069232411307951, 2.2067294460511864, 2.2065359545183161, 2.2063427647348335, 2.2061498752553961, 2.2059572847743043, 2.2057649921697013, 2.2055729965041859, 2.2053812970216713, 2.2051898931437504, 2.2049995195218131, 2.2048106443658755, 2.2046220769950415, 2.2044338123789395, 2.2042458484830916, 2.2040581846237468, 2.2038708204990285, 2.2036845079006397, 2.2035036971888911, 2.2033232410849255, 2.203143127101344, 2.2029633510186084, 2.2027839109886553, 2.2026048048887508, 2.2024260314595336, 2.2022475903948382, 2.2020694815705366, 2.2018917050232725, 2.2017142603598465, 2.201537147461802, 2.2013603661439398, 2.2011839473429138, 2.201007908654546, 2.2008322031111054, 2.2006568312193968, 2.2004817937444638, 2.2003070908167834, 2.2001327201774163, 2.1999586824719599, 2.199784979203852, 2.1996116121688885, 2.1994386474994374, 2.1992666694179004, 2.1990950247620669, 2.1989237142339015, 2.1987527396620656, 2.1985821072331793, 2.1984118168813311, 2.1982418697930211, 2.1980722663807986, 2.1979030074499613, 2.1977340950480859, 2.1975655314002882, 2.1973973188557849, 2.1972295127318073, 2.1970620884262808, 2.1968950358091912, 2.1967283520951519, 2.1965620411051425, 2.1963961034017427, 2.1962305397638824, 2.1960653519493158, 2.1959005422180424, 2.1957361131062427, 
2.1955720673152701, 2.1954084243956329, 2.1952452185002072, 2.1950824015281913, 2.1949199760997939, 2.1947579920245208, 2.194596486970859, 2.194435378079842, 2.194274667694617, 2.1941143583800913, 2.1939544527866626, 2.1937949536172661, 2.193635863617045, 2.193477185831318, 2.1933189228615122, 2.1931610771930981, 2.193003651333929, 2.1928466473339974, 2.1926900677182219, 2.1925339150542529, 2.1923781919498442, 2.1922229010482983, 2.192068044775962, 2.1919136258061274, 2.1917596469508864, 2.1916061103457647, 2.1914530185461101, 2.1913003741376773, 2.191148179720567, 2.1909964378340838, 2.1908451510669855, 2.1906943220299886, 2.1905439423167259, 2.1903940230873138, 2.1902445687721142, 2.1900955881872939, 2.1899471844863507, 2.1897994703844912, 2.1896522157763383, 2.1895054204059368, 2.1893590850907576, 2.1892132111239051, 2.1890678619587276, 2.1889230366655625, 2.1887786996240099, 2.188634840422683, 2.1884914558546589, 2.1883485453425977, 2.1882061094065848, 2.188064149023301, 2.1879226653432862, 2.1877816594916584, 2.1876411315204249, 2.1875010814276741, 2.1873615100925412, 2.1872224184321758, 2.1870838073818835, 2.1869456778917731, 2.186808030926223, 2.1866708674628623, 2.1865341884924208, 2.1863979943364309, 2.1862622855314773, 2.1861270629942768, 2.1859923276683224, 2.185858080510338, 2.1857243224844827, 2.1855910542129404, 2.1854582754224312, 2.1853259866592878, 2.1851941884964989, 2.1850628815160764, 2.1849320663074456, 2.1848017434657265, 2.1846719135905328, 2.1845425772841769, 2.1844137351508595, 2.1842853877955344, 2.1841575358232137, 2.1840301798385013, 2.1839033204449456, 2.1837769582445801, 2.1836510933556266, 2.1835257253856364, 2.1834008545770041, 2.1832764813676211, 2.1831529204044182, 2.1830300897229895, 2.1829077425477723, 2.1827858785454306, 2.1826644957387589, 2.1825435931350921, 2.1824231699261816, 2.1823032254438868, 2.1821837605685368, 2.1820647736731473, 2.1819462639724989, 2.1818282309291153, 2.1817106740861965, 2.1815935930410033, 2.1814769874354898, 2.1813608569507799, 2.1812452013026657, 2.1811300202384571, 2.1810153135344041, 2.180901070109551, 2.180787288044236, 2.1806739705568847, 2.1805611186825757, 2.1804487330784821]
'''