예제 #1
0
    def __init__(self, ls, cost):
        """Build a small conv net for 3-channel 256x256 images.

        Two 5x5 convolution layers (3 tanh filters each, 2x2 max-pooled),
        flattened into a 5-unit tanh hidden layer with L1/L2 regularization,
        ending in a 2-way softmax classifier.

        :param ls: learning scenario handed to the output layer.
        :param cost: cost object handed to the output layer.
        """
        pooler = MCONV.MaxPooling2D(2, 2)

        # Plain multi-channel input layer. (An MCONV.InputChanneler(256, 256)
        # could instead arrange regular layers into several channels.)
        inp = MCONV.Input(nbChannels=3, height=256, width=256, name='inp')

        conv_a = MCONV.Convolution2D(
            nbFilters=3, filterHeight=5, filterWidth=5,
            activation=MA.Tanh(), pooler=pooler, name="conv1")

        conv_b = MCONV.Convolution2D(
            nbFilters=3, filterHeight=5, filterWidth=5,
            activation=MA.Tanh(), pooler=pooler, name="conv2")

        # Flatten bridges the conv stack into fully connected layers.
        flat = MCONV.Flatten(name="flat")

        hidden = ML.Hidden(
            5,
            activation=MA.Tanh(),
            decorators=[MD.GlorotTanhInit()],
            regularizations=[MR.L1(0), MR.L2(0.0001)],
            name="hid")

        out = ML.SoftmaxClassifier(
            2,
            decorators=[],
            learningScenario=ls,
            costObject=cost,
            name="out",
            regularizations=[])

        # '>' chains layers into the network graph.
        self.model = inp > conv_a > conv_b > flat > hidden > out
예제 #2
0
    def __init__(self, ls, cost):
        """LeNet-style conv net for 1-channel 28x28 (MNIST-sized) input.

        conv1 (20 5x5 filters) -> conv2 (50 5x5 filters), each tanh-activated
        and 2x2 max-pooled, flattened into a 500-unit tanh hidden layer and a
        10-way softmax classifier.

        :param ls: learning scenario handed to the output layer.
        :param cost: cost object handed to the output layer.
        """
        maxPool = MCONV.MaxPooling2D(2, 2)

        i = MCONV.Input(nbChannels=1, height=28, width=28, name='inp')

        c1 = MCONV.Convolution2D(nbFilters=20,
                                 filterHeight=5,
                                 filterWidth=5,
                                 activation=MA.Tanh(),
                                 pooler=maxPool,
                                 name="conv1")

        c2 = MCONV.Convolution2D(nbFilters=50,
                                 filterHeight=5,
                                 filterWidth=5,
                                 activation=MA.Tanh(),
                                 pooler=maxPool,
                                 name="conv2")

        # needed for the transition to a fully connected layer
        f = MCONV.Flatten(name="flat")
        h = ML.Hidden(500,
                      activation=MA.Tanh(),
                      decorators=[],
                      regularizations=[],
                      name="hid")
        o = ML.SoftmaxClassifier(10,
                                 decorators=[],
                                 learningScenario=ls,
                                 costObject=cost,
                                 name="out",
                                 regularizations=[])

        self.model = i > c1 > c2 > f > h > o
        # Fix: 'print self.model' is Python-2-only syntax (SyntaxError on 3).
        # A single-argument print(...) call behaves the same on both 2 and 3.
        print(self.model)
예제 #3
0
    def __init__(self, ls, cost):
        """Three-stage conv net for 1-channel 100x100 input.

        All conv stages use 3x3 max-norm filters with shared 3x3 max-pooling
        (10 -> 20 -> 10 filters), feeding a 2048-unit max-norm hidden layer
        with 0.7 binomial dropout and a 2-way softmax classifier.

        :param ls: learning scenario handed to the output layer.
        :param cost: cost object handed to the output layer.
        """
        pool = MCONV.MaxPooling2D(3, 3)
        inp = MCONV.Input(nbChannels=1, height=100, width=100, name='inp')

        def make_conv(n_filters, layer_name):
            # Every conv stage shares the 3x3 / max-norm / pooled setup.
            return MCONV.Convolution2D(nbFilters=n_filters,
                                       filterHeight=3,
                                       filterWidth=3,
                                       activation=MA.Max_norm(),
                                       pooler=pool,
                                       name=layer_name)

        # Stages created and chained in the original order: conv1, conv3, conv2.
        stage_a = make_conv(10, "conv1")
        stage_b = make_conv(20, "conv3")
        stage_c = make_conv(10, "conv2")

        flat = MCONV.Flatten(name="flat")
        hidden = ML.Hidden(2048,
                           activation=MA.Max_norm(),
                           decorators=[MD.BinomialDropout(0.7)],
                           regularizations=[],
                           name="hid")

        out = ML.SoftmaxClassifier(2,
                                   decorators=[],
                                   learningScenario=ls,
                                   costObject=cost,
                                   name="out",
                                   regularizations=[])

        self.model = inp > stage_a > stage_b > stage_c > flat > hidden > out
예제 #4
0
def classes(targets):
    """Map each score vector in ``targets[0]`` to a flipped binary label.

    ``argmax`` picks the highest-scoring class index; ``|index - 1|`` flips
    a binary index, so index 1 -> label 0 and index 0 -> label 1.

    :param targets: sequence whose first element is an iterable of
        score/one-hot vectors (anything ``numpy.argmax`` accepts).
    :returns: list of flipped class indices.
    """
    # PEP 8 4-space indent; return the comprehension directly instead of
    # binding it to a throwaway local first.
    return [numpy.abs(numpy.argmax(t) - 1) for t in targets[0]]

# --- Run configuration: hyper-parameters and data-set locations ------------
maxPool = MCONV.MaxPooling2D(2, 2)
ls = MS.MomentumGradientDescent(lr=1e-1, momentum=0.95)
cost = MC.NegativeLogLikelihood()
miniBatchSize = 100

trainfile = "../flip_grey_train_dataset.p"
validfile = "../flip_grey_valid_dataset.p"
testfile = "../test_set.p"
runprefix = "HD2"


# Network input: single-channel 100x100 images.
i = MCONV.Input(nbChannels=1, height=100, width=100, name='inp')

# First convolution stage: 15 3x3 max-norm filters, max-pooled.
c1 = MCONV.Convolution2D(nbFilters=15,
                         filterHeight=3,
                         filterWidth=3,
                         activation=MA.Max_norm(),
                         pooler=maxPool,
                         name="conv1")

c3 = MCONV.Convolution2D( 
	nbFilters = 25,
	filterHeight = 3,
	filterWidth = 3,
	activation = MA.Max_norm(),