Code example #1
0
def autoex1(epochs=2000, num_bits=3, lrate=0.5, tint=25, showint=100):
    """Build and train a small autoencoder, then plot its hidden-layer codes.

    Returns the trained network so the caller can inspect it further.
    """
    net = autoencoder(nh=num_bits, lr=lrate)
    PLT.ion()  # interactive plotting while training progresses
    net.do_training(epochs, test_interval=tint, show_interval=showint)
    # Final test pass; scatter=True plots the hidden-layer activation vectors.
    net.do_testing(scatter=True)
    PLT.ioff()
    TFT.close_session(net.current_session)
    return net
Code example #2
0
File: tutor1.py  Project: odinbp/NeuralNet
def tfex7(n=5, size=5):
    """Repeatedly apply the update x <- x*w + y for n extra steps.

    Generalized from the original hard-coded 5x5 case: `size` (default 5,
    backward compatible) sets the dimension of the weight matrix and the
    row vectors.  The same random feed vector y is reused on every step.
    """
    w = tf.Variable(np.random.uniform(-1, 1, size=(size, size)),
                    name='w')  # Same matrix as before
    x = tf.Variable(np.zeros((1, size)), name='x')
    y = tf.placeholder(tf.float64, shape=(1, size), name='y')
    feeder = {y: np.random.uniform(-1, 1, size=(1, size))}
    update_x = x.assign(tf.matmul(x, w) + y)
    # First run initializes the session and shows w, x and y.
    _, _, sess = quickrun4([update_x], [w, x, y], feed_dict=feeder)
    for step in range(n):
        quickrun4([update_x], [x], session=sess, feed_dict=feeder)
    TFT.close_session(sess)
Code example #3
0
File: tutor1.py  Project: odinbp/NeuralNet
def tfex8(size=5, steps=50, tvect=None, learning_rate=0.5, showint=10):
    """Train a one-layer sigmoid net toward a target vector via gradient descent.

    Bug fix: the original used `tvect if tvect else ...`, which raises
    "truth value of an array ... is ambiguous" whenever a numpy vector with
    more than one element is supplied; test against None explicitly instead.
    """
    target = tvect if tvect is not None else np.ones((1, size))
    w = tf.Variable(np.random.uniform(-.1, .1, size=(size, size)),
                    name='weights')  # weights applied to x.
    b = tf.Variable(np.zeros((1, size)), name='bias')  # bias terms
    x = tf.placeholder(tf.float64, shape=(1, size), name='input')
    y = tf.sigmoid(
        tf.matmul(x, w) + b, name='out-sigmoid'
    )  # Gather all weighted inputs, then apply activation function
    error = tf.reduce_mean(tf.square(target - y))
    optimizer = tf.train.GradientDescentOptimizer(learning_rate)
    training_operator = optimizer.minimize(error)
    feeder = {x: np.random.uniform(-1, 1, size=(1, size))}
    sess = TFT.gen_initialized_session()
    for step in range(steps):
        quickrun4([training_operator], [w, b, y],
                  session=sess,
                  feed_dict=feeder,
                  step=step,
                  show_interval=showint)
    TFT.close_session(sess)
Code example #4
0
File: tutor2.py  Project: stianvale/AIProgTflow
def autoex1(epochs=1000000,
            num_bits=15,
            lrate=1,
            tint=100,
            showint=10000,
            mbs=53):
    """Train an autoencoder on the yeast dataset and scatter-plot its codes."""
    # Alternative case generators kept for quick experimentation:
    #   lambda: TFT.gen_all_parity_cases(10)
    #   lambda: TFT.gen_segmented_vector_cases(25, 1000, 0, 8)
    #   lambda: TFT.gen_vector_count_cases(500, 15)
    case_generator = "yeast.txt"
    cases = Caseman(cfunc=case_generator,
                    sep=",",
                    vfrac=0,
                    tfrac=0,
                    stdeviation=True)
    net = autoencoder(nh=num_bits, lr=lrate, cman=cases, mbs=mbs)
    PLT.ion()
    net.do_training(epochs, test_interval=tint, show_interval=showint, mbs=mbs)
    # Final test pass; scatter=True plots the hidden-layer activation vectors.
    net.do_testing(scatter=True, mbs=mbs)
    PLT.ioff()
    TFT.close_session(net.current_session, False)
    return net
Code example #5
0
 def close_current_session(self, view=True):
     """Persist the current session's parameters, then close it.

     `view` is forwarded to TFT.close_session; presumably it controls
     whether the session's state is displayed on close -- confirm in TFT.
     """
     self.save_session_params(sess=self.current_session)
     TFT.close_session(self.current_session, view=view)
Code example #6
0
File: tutor4.py  Project: stianvale/AIProgTflow
def mainfunc(steps=1000000,
             lrate="scale",
             tint=100,
             showint=10000,
             mbs=100,
             wgt_range=(-.3, .3),
             hidden_layers=None,
             hidac=(lambda x, y: tf.tanh(x, name=y)),
             outac=(lambda x, y: tf.nn.softmax(x, name=y)),
             case_generator="mnist.txt",
             stdeviation=False,
             vfrac=0.1,
             tfrac=0.1,
             cfunc="rmse",
             mapbs=0,
             map_layers=None,
             display_wgts=None,
             display_biases=None,
             cfrac=1.0,
             dendrogram_layers=None,
             numeric=False):
    """Build, train and optionally map a Gann on the given case source.

    `steps` is the total number of training steps; epochs = steps // mbs.
    The layer-index lists select layers whose outputs / weights / biases
    are registered as map variables; index 0 (the input layer) is skipped
    for weights, biases and dendrograms.  Returns the trained network.
    """
    # Resolve list defaults per call to avoid mutable default arguments.
    hidden_layers = [50, 50] if hidden_layers is None else hidden_layers
    map_layers = [] if map_layers is None else map_layers
    display_wgts = [] if display_wgts is None else display_wgts
    display_biases = [] if display_biases is None else display_biases
    dendrogram_layers = [] if dendrogram_layers is None else dendrogram_layers

    cman = Caseman(cfunc=case_generator,
                   cfrac=cfrac,
                   vfrac=vfrac,
                   tfrac=tfrac,
                   stdeviation=stdeviation)
    ann = Gann(lr=lrate,
               cman=cman,
               mbs=mbs,
               wgt_range=wgt_range,
               hidden_layers=hidden_layers,
               hidac=hidac,
               outac=outac,
               cfunc=cfunc,
               dendrogram_layers=dendrogram_layers)
    PLT.ion()
    epochs = int(steps / mbs)
    ann.do_training(epochs, test_interval=tint, show_interval=showint, mbs=mbs)

    for out_layer in map_layers:
        ann.add_mapvar(out_layer - 1, "out")

    for wgts_layer in display_wgts:
        if wgts_layer == 0:
            continue  # the input layer has no incoming weights
        ann.add_mapvar(wgts_layer - 1, "wgt")

    for bias_layer in display_biases:
        if bias_layer == 0:
            continue  # the input layer has no bias terms
        ann.add_mapvar(bias_layer - 1, "bias")

    for dendro_layer in dendrogram_layers:
        if dendro_layer == 0:
            continue
        # Positional arg for consistency with the other add_mapvar calls
        # (the original passed type="out" only here).
        ann.add_mapvar(dendro_layer - 1, "out")

    # NOTE(review): the original (Norwegian) comment says the dendrogram
    # should be shown for a chosen layer, not between input and output of
    # the whole network -- confirm against Gann.do_mapping.
    if mapbs > 0:
        ann.do_mapping(mbs=mbs, mapbs=mapbs, numeric=numeric)

    PLT.ioff()
    TFT.close_session(ann.current_session, False)
    return ann
Code example #7
0
File: module3_mnist.py  Project: Hammerset/AI_module3
	def closeCurrentSession(self, view = True):
		"""Save the current session's parameters, then close it.

		`view` is forwarded to tft.close_session; presumably it controls
		whether session state is displayed on close -- confirm in tft.
		"""
		self.saveSessionParams(sess = self.currentSession)
		tft.close_session(self.currentSession, view = view)