Example #1
0
    # net.rnn(text.max_word_length)
    net.regression(dimensions=2)  # for


def denseConv(net):
    # type: (layer.net) -> None
    """Assemble a DenseNet-style convolutional stack on *net*.

    Mutates *net* in place; relies on the module-level ``size`` and on
    ``letter.color_channels`` to shape the incoming picture.
    """
    print("Building dense-net")
    # -1 keeps the batch dimension flexible; the rest restores the 2-D image.
    picture_shape = [-1, size, size, letter.color_channels]
    net.reshape(shape=picture_shape)
    net.buildDenseConv(nBlocks=1)


""" Baseline tests to see that your model doesn't have any bugs and can learn small test sites without efforts """

# net = layer.net(layer.baseline, input_width=size, output_width=nClasses, learning_rate=learning_rate)
# net.train(data=data, test_step=1000)  # run
""" here comes the real network """

# net = layer.net(denseConv, input_width=size, output_width=2, learning_rate=learning_rate)
# NOTE(review): `startPositionGanglion` is not defined in the visible scope —
# presumably a model-builder function elsewhere in this file; confirm it exists.
net = layer.net(startPositionGanglion,
                input_width=size,
                output_width=2,
                learning_rate=learning_rate)

# net.train(data=data,steps=50000,dropout=0.6,display_step=1,test_step=1) # debug
# net.train(data=data, steps=training_steps,dropout=0.6,display_step=5,test_step=20) # test
# Train with default step/batch settings; log every 5 steps, test every 100.
net.train(data=data, dropout=.6, display_step=5, test_step=100)  # run resume

# net.predict() # nil=random
# net.generate(3)  # nil=random
from os import system

import layer

# NOTE(review): `Tkinter` (the Python 2 name) is used without a visible
# import — confirm it is imported elsewhere or rename to `tkinter` for Py3.
app = Tkinter.Tk()
import matplotlib.pyplot as plt
# Show a tiny 2x2 identity matrix just to open a figure window.
plt.matshow([[1, 0], [0, 1]], fignum=1)
# print(dir(plt))
# help(plt)
# ax.patch.set_facecolor('None') or ax.patch.set_visible(False).
plt.draw()
# macOS-only: bring the Python process window to the front via AppleScript.
system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "Python" to true' ''')

# LOAD MODEL!
net = layer.net(model="denseConv", input_shape=[28,28])
# net = layer.net(model="denseConv", input_shape=[784])
net.predict()#random : debug

i = 0
width = 256
height = 256

def get_mouse_position():
	"""Return the current (x, y) screen coordinates of the mouse pointer.

	On Windows this uses the win32api extension; on every other platform
	it falls back to the module-level Tk root window ``app``.
	"""
	# BUG FIX: sys.platform is 'win32' on Windows (never 'Windows'), so the
	# original equality test could never select the win32api branch.
	if sys.platform.startswith('win'):
		import win32api
		x, y = win32api.GetCursorPos()
	else:
		x, y = app.winfo_pointerxy()
	return x, y
def recurrent(net):
	# type: (layer.net) -> None
	"""Build a recurrent model: RNN feature extractor followed by a
	classifier (class count inferred from the labels by layer.net)."""
	net.rnn()
	net.classifier()


def denseNet(net):
	# type: (layer.net) -> None
	"""Fully-connected pyramid ending in an auto-sized classifier."""
	print("Building fully connected pyramid")
	# NOTE(review): `shape` is a bare name with no definition in the visible
	# scope — presumably a module-level shape list; confirm before running.
	net.reshape(shape)  # Reshape input picture
	net.fullDenseNet()
	net.classifier() # auto classes from labels

# width=64 # for pcm baby data
# batch=speech_data.spectro_batch_generator(1000,target=speech_data.Target.digits)
# classes=10

# CHOSE MODEL ARCHITECTURE HERE:
# net=layer.net(simple_dense, data=batch,input_shape=[height,width],output_width=classes, learning_rate=learning_rate)
# net=layer.net(model=alex,input_width= width*height,output_width=classes, learning_rate=learning_rate)
# net=layer.net(model=denseConv,input_width= width*height,output_width=classes, learning_rate=learning_rate)
# NOTE(review): `batch`, `classes`, `learning_rate` and `training_iters` must
# be defined elsewhere in this file — none are visible here.
net = layer.net(recurrent, data=batch, input_shape=[height, width], output_width=classes, learning_rate=learning_rate)

# net.train(data=batch,batch_size=10,steps=500,dropout=0.6,display_step=1,test_step=1) # debug
net.train(data=batch,batch_size=10,steps=training_iters,dropout=0.6,display_step=10,test_step=100) # test
# net.train(data=batch,batch_size=batch_size,steps=training_iters,dropout=0.6,display_step=10,test_step=100) # run

# net.predict() # nil=random
# net.generate(3)  # nil=random

training_steps = 500000  # upper bound on optimisation steps (not used by the train call above)
batch_size = 10
size = text.canvas_size  # NOTE(review): `text` is not imported in the visible scope — confirm


def denseConv(net):
    # type: (layer.net) -> None
    """Build a dense convolutional stack on *net* (mutates it in place).

    Input geometry comes from the module-level ``size`` and from
    ``letter.color_channels``.
    """
    print("Building dense-net")
    # Restore the flat input to a batch of size x size images with channels.
    img_shape = [-1, size, size, letter.color_channels]
    net.reshape(shape=img_shape)
    net.buildDenseConv(nBlocks=1)


""" Baseline tests to see that your model doesn't have any bugs and can learn small test sites without efforts """

# net = layer.net(layer.baseline, input_width=size, output_width=nClasses, learning_rate=learning_rate)
# net.train(data=data, test_step=1000)  # run
""" here comes the real network """
# Two-output regression/classification head on the denseConv model above.
net = layer.net(denseConv,
                input_width=size,
                output_width=2,
                learning_rate=learning_rate)

# net.train(data=data,steps=50000,dropout=0.6,display_step=1,test_step=1) # debug
# net.train(data=data, steps=training_steps,dropout=0.6,display_step=5,test_step=20) # test
# Default step count; log every 5 steps, evaluate every 100.
net.train(data=data, dropout=.6, display_step=5, test_step=100)  # run resume

# net.predict() # nil=random
# net.generate(3)  # nil=random
Example #5
0
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from letter import letter as l
# NOTE(review): only `l` is bound by the import above, yet `letter.max_size`
# is referenced — confirm the `letter` module itself is imported elsewhere.
size = letter.max_size
# def denseConv(net):
# 	# type: (layer.net) -> None
# 	print("Building dense-net")
# 	net.reshape(shape=[-1, size, size, 1])  # Reshape input picture
# 	net.buildDenseConv(nBlocks=1)
# 	net.classifier()  # 10 classes auto
# # net=layer.net(alex,input_width=28, output_width=nClasses, learning_rate=learning_rate) # NOPE!?
# net = layer.net(denseConv, input_width=size, output_width=letter.nClasses)

# # LOAD MODEL!
# Restore a previously saved "denseConv" model for size x size inputs.
net = layer.net(model="denseConv", input_shape=[size, size])

# net = layer.net(model="denseConv", input_shape=[784])
# net.predict()  # random : debug


# net.generate(3)  # nil=random
def norm(mat):
    """Normalize pixel values from [0, 255] into [-1, 1].

    Maps 0 -> 1.0 and 255 -> -1.0.  BUG FIX: the original assigned the
    result to the local name ``mat`` and returned None, silently
    discarding the normalized array; the value is now returned.
    """
    return 1 - 2 * mat / 255.  # norm [-1,1] !


# mat = 1 - mat / 255.  # norm [0,1]!
# mat = mat / 255.  # norm [0,1]!


def predict(mat, norm=False):
	"""Run prediction on *mat*.

	NOTE(review): the body only calls ``net.classifier()`` and ignores both
	arguments — this looks like a truncated or mis-pasted snippet; the
	*norm* parameter also shadows the module-level ``norm`` function.
	Confirm the intended body before relying on this.
	"""
	net.classifier() # auto classes from labels


# Toggle between the digit task (True) and the unfinished word task (False).
train_digits=True
if train_digits:
	width= height=64 # for pcm baby data
	batch=speech_data.spectro_batch_generator(1000,target=speech_data.Target.digits)
	classes=10 # digits
else:
	width=512 # for spoken_words overkill data
	classes=74 #
	# NOTE(review): `Source`/`Target` are unqualified here (elsewhere they are
	# speech_data.Target) and `height` is never set on this branch — the
	# raise below means this path is intentionally unreachable for now.
	batch=word_batch=speech_data.spectro_batch_generator(10, width, source_data=Source.WORD_SPECTROS, target=Target.first_letter)
	raise Exception("TODO")

X,Y=next(batch)

# CHOOSE MODEL ARCHITECTURE HERE:
# net = layer.net(simple_dense, data=batch, input_width=width, output_width=classes, learning_rate=0.01)
net = layer.net(simple_dense, data=batch, input_shape=(width,height), output_width=classes, learning_rate=0.01)
# net=layer.net(model=alex,input_shape=(width, height),output_width=10, learning_rate=learning_rate)
# net=layer.net(model=denseConv, input_shape=(width, height),output_width=10, learning_rate=learning_rate)

net.train(data=batch,batch_size=10,steps=500,dropout=0.6,display_step=1,test_step=1) # debug
# net.train(data=batch,batch_size=10,steps=5000,dropout=0.6,display_step=5,test_step=20) # test
# net.train(data=batch,batch_size=10,steps=5000,dropout=0.6,display_step=10,test_step=100) # run

# net.predict() # nil=random
# net.generate(3)  # nil=random

print ("Now try switching between model architectures in line 68-71")
def recurrent(net):
	# type: (layer.net) -> None
	"""Recurrent model: RNN feature extractor topped by a classifier whose
	class count layer.net infers from the labels."""
	net.rnn()
	net.classifier()


def denseNet(net):
	# type: (layer.net) -> None
	"""Fully-connected pyramid ending in an auto-sized classifier."""
	print("Building fully connected pyramid")
	# NOTE(review): `shape` is an undefined bare name in the visible scope —
	# presumably a module-level shape list; confirm before running.
	net.reshape(shape)  # Reshape input picture
	net.fullDenseNet()
	net.classifier() # auto classes from labels

# width=64 # for pcm baby data
# batch=speech_data.spectro_batch_generator(1000,target=speech_data.Target.digits)
# classes=10

# CHOSE MODEL ARCHITECTURE HERE:
# net=layer.net(simple_dense, data=batch,input_shape=[height,width],output_width=classes, learning_rate=learning_rate)
# net=layer.net(model=alex,input_width= width*height,output_width=classes, learning_rate=learning_rate)
# net=layer.net(model=denseConv,input_width= width*height,output_width=classes, learning_rate=learning_rate)
# NOTE(review): `batch`, `classes`, `learning_rate`, `training_iters` must be
# defined earlier in the file — none are visible in this chunk.
net = layer.net(recurrent, data=batch, input_shape=[height, width], output_width=classes, learning_rate=learning_rate)

# net.train(data=batch,batch_size=10,steps=500,dropout=0.6,display_step=1,test_step=1) # debug
net.train(data=batch,batch_size=10,steps=training_iters,dropout=0.6,display_step=10,test_step=100) # test
# net.train(data=batch,batch_size=batch_size,steps=training_iters,dropout=0.6,display_step=10,test_step=100) # run

# net.predict() # nil=random
# net.generate(3)  # nil=random

    net.classifier()  # 10 classes auto


# OK, not bad, Alex!
#  Step 6490 Loss= 0.000908 Accuracy= 1.000                        Test Accuracy: 0.995
def alex(net):
    # type: (layer.net) -> None
    """AlexNet-flavoured stack: three 3x3 convolutions doubling the channel
    count (1 -> 64 -> 128 -> 256) on 28x28 grayscale input, then two
    1024-wide ReLU dense layers."""
    print("Building Alex-net")
    net.reshape(shape=[-1, 28, 28, 1])  # Reshape input picture
    # net.batchnorm()
    for kernel in ([3, 3, 1, 64], [3, 3, 64, 128], [3, 3, 128, 256]):
        net.conv(kernel)
    for _ in range(2):
        net.dense(1024, activation=tf.nn.relu)


# net=layer.net(baseline, data, learning_rate=0.001)
# net=layer.net(alex,data, learning_rate=0.001) # NOPE!?
# NOTE(review): `data`, `learning_rate` and `training_iters` are not defined
# in the visible scope — confirm they are set earlier in the file.
net = layer.net(denseConv, data, learning_rate)

# net.train(steps=50000,dropout=0.6,display_step=1,test_step=1) # debug
# net.train(steps=50000,dropout=0.6,display_step=5,test_step=20) # test
net.train(data=data,
          steps=training_iters,
          dropout=.6,
          display_step=10,
          test_step=100)  # run
# net.predict() # nil=random
# net.generate(3)  # nil=random
Example #9
0
# OK, not bad, Alex!
#  Step 6490 Loss= 0.000908 Accuracy= 1.000                        Test Accuracy: 0.995
def alex(net):
    # type: (layer.net) -> None
    """AlexNet-style model for 28x28 single-channel input: three 3x3 conv
    layers (channels 1 -> 64 -> 128 -> 256) followed by two 1024-unit
    ReLU dense layers."""
    print("Building Alex-net")
    net.reshape(shape=[-1, 28, 28, 1])  # Reshape input picture
    # net.batchnorm()
    conv_specs = ([3, 3, 1, 64], [3, 3, 64, 128], [3, 3, 128, 256])
    for spec in conv_specs:
        net.conv(spec)
    for _ in range(2):
        net.dense(1024, activation=tf.nn.relu)


# NOTE(review): `baseline`, `nClasses`, `learning_rate`, `data` and
# `training_iters` must come from elsewhere in the file — not visible here.
net = layer.net(baseline,
                input_width=28,
                output_width=nClasses,
                learning_rate=learning_rate)
# net=layer.net(alex,input_width=28, output_width=nClasses, learning_rate=learning_rate) # NOPE!?
# net=layer.net(denseConv, input_width=28, output_width=nClasses,learning_rate=learning_rate)
# net.train(steps=50000,dropout=0.6,display_step=1,test_step=1) # debug
# net.train(steps=50000,dropout=0.6,display_step=5,test_step=20) # test
net.train(data=data,
          steps=training_iters,
          dropout=.6,
          display_step=10,
          test_step=100)  # run
# net.predict() # nil=random
# net.generate(3)  # nil=random
Example #10
0
#!/usr/bin/python

import layer
import letter

size = letter.max_size  # square edge length used by denseConv's reshape below


def denseConv(net):
    # type: (layer.net) -> None
    """Dense convolutional model over single-channel size x size images,
    finished with a classifier sized automatically from the labels."""
    print("Building dense-net")
    # One grayscale channel; -1 keeps the batch dimension flexible.
    picture_shape = [-1, size, size, 1]
    net.reshape(shape=picture_shape)
    net.buildDenseConv(nBlocks=1)
    net.classifier()  # class count inferred automatically


# net=layer.net(alex,input_width=28, output_width=nClasses, learning_rate=learning_rate) # NOPE!?
# Build the model and run a prediction on a random sample (no argument).
net = layer.net(denseConv, input_width=size, output_width=letter.nClasses)

net.predict()  # nil=random
# net.generate(3)  # nil=random
Example #11
0
            label.append(tr_labels[now_i])
            now_i += 1
            if now_i == length:
                now_i = 0
        yield data, label
        data = []
        label = []


#-------------------------------------------------------------------------------------------------------------------------------------# CHOOSE MODEL ARCHITECTURE HERE:
# net = layer.net(simple_dense, data=batch, input_width=width, output_width=classes, learning_rate=0.01)
# net = layer.net(simple_dense, input_shape=(width,height), output_width=classes, learning_rate=0.01)
# net=layer.net(model=alex,input_shape=(width, height),output_width=10, learning_rate=learning_rate)

# Two-class denseConv model over (width, height) inputs.
net = layer.net(model=denseConv,
                input_shape=(width, height),
                output_width=2,
                learning_rate=learning_rate)

# BUG FIX: `print net` is Python 2 statement syntax; the rest of this file
# uses the print() function, so the statement form would be a SyntaxError.
print(net)

#net.train_ichikawa_2(data=ichikawa,batch_size=10,steps=20000,dropout=0.6,display_step=10,test_step=100,ckpt_name="20170904.ckpt",start_ckpt="20170817.ckpt") # debug
# Evaluate accuracy on a fresh test batch, restoring the named checkpoint.
ichikawa = test_now()  #test
net.accuracy_test(data=ichikawa,
                  batch_size=10,
                  steps=100,
                  dropout=0.6,
                  display_step=1,
                  test_step=1,
                  ckpt_name="20170823.ckpt")

def alex(net):
    # type: (layer.net) -> None
    """AlexNet-style stack for 28x28 single-channel pictures: three 3x3
    convolutions (1 -> 64 -> 128 -> 256 channels) and two 1024-unit
    ReLU dense layers."""
    print("Building Alex-net")
    net.reshape(shape=[-1, 28, 28, 1])  # Reshape input picture
    # net.batchnorm()
    for filters in ([3, 3, 1, 64], [3, 3, 64, 128], [3, 3, 128, 256]):
        net.conv(filters)
    for _ in range(2):
        net.dense(1024, activation=tf.nn.relu)


# NOTE(review): `baseline`, `nClasses`, `data` and `training_iters` must be
# defined earlier in the file — none are visible in this chunk.
net = layer.net(baseline,
                input_shape=[28, 28],
                output_width=nClasses,
                learning_rate=0.001)
# net=layer.net(alex,data, learning_rate=0.001) # NOPE!?
# net=layer.net(denseConv, data=data, output_width=-1,learning_rate=learning_rate)

# net.train(steps=50000,dropout=0.6,display_step=1,test_step=1) # debug
# net.train(steps=50000,dropout=0.6,display_step=5,test_step=20) # test
net.train(data=data,
          steps=training_iters,
          dropout=.6,
          display_step=10,
          test_step=100)  # run

# net.predict() # nil=random
# net.generate(3)  # nil=random
	# net.buildDenseConv(nBlocks=1)
	net.conv2d(20)
	net.argmax2d()
	net.regression(dimensions=2) # for



def denseConv(net):
	# type: (layer.net) -> None
	"""Dense convolutional network over size x size pictures with
	letter.color_channels channels; mutates *net* in place."""
	print("Building dense-net")
	# -1 leaves the batch dimension flexible.
	picture = [-1, size, size, letter.color_channels]
	net.reshape(shape=picture)
	net.buildDenseConv(nBlocks=1)


""" Baseline tests to see that your model doesn't have any bugs and can learn small test sites without efforts """

# net = layer.net(layer.baseline, input_width=size, output_width=nClasses, learning_rate=learning_rate)
# net.train(data=data, test_step=1000)  # run

""" here comes the real network """

# net = layer.net(denseConv, input_width=size, output_width=2, learning_rate=learning_rate)
# NOTE(review): `positionGanglion` and `output_shape` are not defined in the
# visible scope — confirm they exist earlier in the file.
net = layer.net(positionGanglion, input_width=size, output_width=output_shape, learning_rate=learning_rate)

# net.train(data=data,steps=50000,dropout=0.6,display_step=1,test_step=1) # debug
# net.train(data=data, steps=training_steps,dropout=0.6,display_step=5,test_step=20) # test
# Default step count; log every 5 steps, evaluate every 100.
net.train(data=data, dropout=.6, display_step=5, test_step=100) # run resume

# net.predict() # nil=random
# net.generate(3)  # nil=random
	print("Building dense-net")
	# tf.image.crop_and_resize()
	net.reshape(shape=[-1, size, size, 1])  # Reshape input picture
	# net.conv([3, 3, 1, 64])
	net.buildDenseConv(nBlocks=1)
	# net.dense(96*3)
	net.classifier() # 10 classes auto

def alex(net):
	# type: (layer.net) -> None
	"""AlexNet-style model: three 3x3 convolutions (1 -> 64 -> 128 -> 256
	channels) on size x size single-channel input, then two 1024-unit
	ReLU dense layers."""
	print("Building Alex-net")
	net.reshape(shape=[-1, size, size, 1])  # Reshape input picture
	# net.batchnorm()
	for spec in ([3, 3, 1, 64], [3, 3, 64, 128], [3, 3, 128, 256]):
		net.conv(spec)
	for _ in range(2):
		net.dense(1024, activation=tf.nn.relu)


# net=layer.net(baseline, input_shape=[28,28], output_width=nClasses,learning_rate=0.001)
# net=layer.net(alex,data, learning_rate=0.001) # NOPE!?
# NOTE(review): `nClasses`, `learning_rate`, `data`, `training_iters` must
# be defined earlier in the file — not visible in this chunk.
net=layer.net(denseConv, input_shape=[size, size], output_width=nClasses,learning_rate=learning_rate)

# net.train(steps=50000,dropout=0.6,display_step=1,test_step=1) # debug
# net.train(steps=50000,dropout=0.6,display_step=5,test_step=20) # test
net.train(data=data, steps=training_iters, dropout=.6, display_step=10, test_step=100) # run

# net.predict() # nil=random
# net.generate(3)  # nil=random
Example #15
0
# width=64 # for pcm baby data
# batch=speech_data.spectro_batch_generator(1000,target=speech_data.Target.digits)
# classes=10

width = 512  # for spoken_words overkill data
classes = 74  #
# NOTE(review): `speech_data`, `Source` and `Target` are not imported in the
# visible scope — confirm they are available before running.
batch = word_batch = speech_data.spectro_batch_generator(
    10, width, source_data=Source.WORD_SPECTROS, target=Target.first_letter)
X, Y = next(batch)

# CHOSE MODEL ARCHITECTURE HERE:
# net=layer.net(simple_dense, width*width, classes, learning_rate=0.01)
# net=layer.net(model=alex,input_width=64*64,output_width=10, learning_rate=0.001)
net = layer.net(model=denseConv,
                input_width=64 * 64,
                output_width=10,
                learning_rate=0.001)

net.train(data=batch,
          batch_size=10,
          steps=500,
          dropout=0.6,
          display_step=1,
          test_step=1)  # debug
# net.train(data=batch,batch_size=10,steps=5000,dropout=0.6,display_step=5,test_step=20) # test
# net.train(data=batch,batch_size=10,steps=5000,dropout=0.6,display_step=10,test_step=100) # run

# net.predict() # nil=random
# net.generate(3)  # nil=random