Пример #1
0
def compute_one(
    p,
    n,
    n_alt_plus,
    n_alt_minus,
):
    """Build the true distribution and two alternatives, then score each.

    Parameters
    ----------
    p : float
        Per-channel opening probability passed to ``simple.Simple``.
    n : int
        Channel count of the true model.
    n_alt_plus, n_alt_minus : int
        Channel counts of the "plus" and "minus" alternative models.

    Returns
    -------
    tuple
        ``(SPlus, SMinus)`` — the ``compute_alt()`` result for each
        alternative compared against the true model.
    """
    B = simple.Simple(n=n, p=p).flatten()
    Bplus = simple.Simple(n=n_alt_plus, p=p).flatten()
    Bminus = simple.Simple(n=n_alt_minus, p=p).flatten()
    # print() calls (the original Python 2 print statements are a
    # SyntaxError under Python 3; this form works on both).
    print("Computing Plus Alternative")
    SPlus = compute_alt(B, Bplus)
    print("Computing Minus Alternative")
    SMinus = compute_alt(B, Bminus)
    return SPlus, SMinus
Пример #2
0
def SfNplot():
    """Assemble the SfN summary figure and return ``(fig, ax)``.

    Left column (bottom to top): 3-bar comparisons of the true model
    (n=100) against the n-1 (99) and n-10% (90) alternatives for
    p = 0.1, 0.5, 0.9.  Right column: needed-sample-size regions with
    annotations for the same probabilities.  ``fig.show()`` is called
    before returning.
    """
    # True model has 100 channels; alternatives drop 1 and 10 channels.
    ntrue = 100
    nalt0 = 99
    nalt1 = 90
    # Flattened simple.Simple distributions for every (n, p) combination.
    S100_1 = simple.Simple(n=ntrue, p=.1).flatten()
    S99_1 = simple.Simple(n=nalt0, p=.1).flatten()
    S90_1 = simple.Simple(n=nalt1, p=.1).flatten()
    S100_5 = simple.Simple(n=ntrue, p=.5).flatten()
    S99_5 = simple.Simple(n=nalt0, p=.5).flatten()
    S90_5 = simple.Simple(n=nalt1, p=.5).flatten()
    S100_9 = simple.Simple(n=ntrue, p=.9).flatten()
    S99_9 = simple.Simple(n=nalt0, p=.9).flatten()
    S90_9 = simple.Simple(n=nalt1, p=.9).flatten()
    # ax is indexed as ax[row][col]; row 0 is the top of the figure.
    fig, ax = my_subplots_for_sfn()
    # Bar comparisons: p=0.1 in the bottom-left axis (only that one gets
    # the shared x-label), p=0.5 middle-left, p=0.9 top-left.
    S100_1.compare_3bars(S99_1,
                         S90_1,
                         ntrue, (fig, ax[2][0]),
                         xlab='Number of open channels (k)')
    S100_5.compare_3bars(S99_5, S90_5, ntrue, (fig, ax[1][0]))
    S100_9.compare_3bars(S99_9, S90_9, ntrue, (fig, ax[0][0]))
    # Each right-column axis is drawn twice, once per boolean flag.
    # NOTE(review): the meaning of se_regions' first two arguments is not
    # visible in this file — presumably (p-index, plus/minus side); confirm.
    se_regions(2, False, (fig, ax[0][1]))
    se_regions(1, False, (fig, ax[1][1]))
    se_regions(0, False, (fig, ax[2][1]))
    se_regions(2, True, (fig, ax[0][1]))
    se_regions(1, True, (fig, ax[1][1]))
    se_regions(0, True, (fig, ax[2][1]))
    # Venn-style legend inset plus hand-placed labels on the top-left axis.
    venn((30, .1), .1, .05, (fig, ax[0][0]))
    ax[0][0].text(18, .085, 'n-10%=90')
    ax[0][0].text(41, .105, 'n=100')
    ax[0][0].text(4.5, .105, 'n-1=99')
    ax[0][0].text(5, .12, 'Number of Channels:')
    ax[0][0].text(5, .13, 'Probability of Opening: p=0.9')
    ax[1][0].text(5, .13, 'Probability of Opening: p=0.5')
    ax[2][0].text(20, .13, 'Probability of Opening: p=0.1')
    # Right-column annotations; note the y-coordinates scale by ~10x per
    # row (130 / 1300 / 13000), matching each axis' own y-range.
    ax[0][1].text(5, 130, 'Probability of Opening: p=0.9')
    ax[0][1].text(5, 120, 'Number of Channels in Alternative:')
    ax[0][1].text(10, 110, '(Falsified with 95% Confidence)')
    ax[1][1].text(5, 1300, 'Probability of Opening: p=0.5')
    ax[2][1].text(5, 13000, 'Probability of Opening: p=0.1')
    ax[0][1].set_ylabel('Needed Sample Size')
    ax[1][1].set_ylabel('Needed Sample Size')
    ax[2][1].set_ylabel('Needed Sample Size')
    ax[2][1].set_xlabel('Number of Channels in True Model (n)')
    # Curve labels for the four alternatives drawn by se_regions.
    ax[0][1].text(5, 90, 'n-1')
    ax[0][1].text(5, 72.5, 'n-10%')
    ax[0][1].text(43, 90, 'n+1')
    ax[0][1].text(43, 72.5, 'n+10%')
    venn2((29, 90), .1, .06, (fig, ax[0][1]))
    fig.show()
    return fig, ax
Пример #3
0
def test_hello_world():
    """Smoke test: Simple().hello_world() yields the canonical greeting.

    The working directory is switched to /tmp first, matching the
    environment the test suite expects.
    """
    import simple

    os.chdir('/tmp')
    greeting = simple.Simple().hello_world()
    assert greeting == 'Hello world!'
Пример #4
0
class Simple2:
    """Minimal stand-alone class carrying an identifying ``info`` string."""

    def __init__(self):
        # Tag each instance so callers can distinguish it from other
        # Simple-family classes by its info attribute.
        self.info = "SimpleClass2"


class Simple3(simple.Simple):
    """Subclass of ``simple.Simple`` that only chains the parent initializer."""

    def __init__(self):
        # Equivalent to simple.Simple.__init__(self) for this single
        # inheritance chain; the subclass adds no state of its own.
        super().__init__()


# Module-level value that the imported ``simple`` module is expected to mirror.
text = "text in simple"

assert simple.text == text

_s = simple.Simple()
_s3 = Simple3()
# Simple3 only chains simple.Simple.__init__, so both instances carry
# the same ``info`` value.
assert _s.info == _s3.info

# A module that (directly or indirectly) imports this one must still load.
import recursive_import
_s = recursive_import.myClass()

assert str(_s) == "success!"

# Two modules doing from-imports of the same value must both observe it.
import from_import_test.b
assert from_import_test.b.v == 1

import from_import_test.c
assert from_import_test.c.v == 1

# test of keyword "global" in functions of an imported module
Пример #5
0
# issue 44
funcs = []
for i in [1, 2]:

    def f(x=i):
        # The default argument captures the CURRENT value of i, so each
        # function remembers its own loop value (avoids the classic
        # late-binding closure pitfall).
        return x

    funcs.append(f)

assert funcs[0]() == 1
assert funcs[1]() == 2

# issue 45
import simple
assert simple.Simple().info == "SimpleClass"


# issue 46
class A:
    """Counter demo (issue 46): incrementing a class attribute through
    ``self`` creates an instance attribute that shadows it."""

    # Shared class-level default; never mutated on the class itself.
    COUNTER = 0

    def __init__(self):
        # Reads the class-level COUNTER (0), then binds the incremented
        # value on the instance — A.COUNTER stays 0 afterwards.
        self.COUNTER = self.COUNTER + 1


a = A()  # instantiating bumps only the instance-level COUNTER, not A.COUNTER


class A:
Пример #6
0
 def test_hello_world(self):
     """hello_world('!') must produce the canonical greeting."""
     import simple

     result = simple.Simple().hello_world('!')
     self.assertEqual(result, 'Hello world!')
Пример #7
0
 def test_patch(self, patched_hw):
     """The patched hello_world mock must be invoked exactly once with '!'."""
     import simple

     instance = simple.Simple()
     instance.hello_world('!')
     patched_hw.assert_called_once_with('!')
Пример #8
0
 def tearDown(self):
     """Run after each test: calls Simple().finishUp() for cleanup.

     Presumably this releases whatever setUp's getReady() acquired —
     verify against the simple module.
     """
     simple.Simple().finishUp()
Пример #9
0
 def setUp(self):
     """Run before each test: calls Simple().getReady() to prepare state.

     Note a fresh Simple() is constructed here and again in tearDown;
     no instance is shared between the two hooks.
     """
     simple.Simple().getReady()
Пример #10
0
            
        sy_loss = - tf.reduce_mean(sy_states[-1], name="loss")
        sy_trainable = [neg]
        sy_opt = tf.train.GradientDescentOptimizer(sy_lr, name="GDOpt")
        sy_train_op = sy_opt.minimize(sy_loss, var_list=sy_trainable)
    
    return {"cl_in": x, "cl_label": y, "cl_logit": states[-1],
            "cl_prob": cl_prob, "cl_out": cl_out, "cl_op": cl_train_op,
            "cl_loss": cl_loss, "cl_acc": accuracy,
            "sy_sample": neg, "sy_op": sy_train_op, 
            "sy_logit": sy_states[-1], "sy_loss": sy_loss}

if __name__ == "__main__":
    
    m = build_arch()
    d = simple.Simple()
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    
    neg_data_seq = [sess.run(m["sy_sample"])]
    
    for epoch in range(151):
        
        d.set_neg(neg_data_seq[-1])
        plt.plot(neg_data_seq[-1][:,0], neg_data_seq[-1][:,1], ".")
        plt.xlim([-10, 10])
        plt.ylim([-10, 10])
        plt.savefig("figure/"+str(epoch)+".jpg")
        plt.show()
        
        for i in range(2000):