def test_learn():
    layerContainer = [
        # 3, 150, 150
        conv.ConvLayer(optimizer=adam.AdamConv(), load_path="ff2c1", filtershape=(32, 3, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(3, 150, 150)),
        # 32, 74, 74
        conv.ConvLayer(optimizer=adam.AdamConv(), load_path="ff2c2", filtershape=(64, 32, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(32, 74, 74)),
        # 64, 36, 36
        conv.ConvLayer(optimizer=adam.AdamConv(), load_path="ff2c3", filtershape=(128, 64, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(64, 36, 36)),
        # 128, 17, 17
        fcnetwork.FCLayer(optimizer=adam.AdamFC(), load_path="ff2fcn1", arch=[36992, 512, 128],
                          activation_func="relu", is_classifier=False),
        softmax.SoftmaxLayer(optimizer=adam.AdamFC(), load_path="ff2softm", arch=[128, 5])
    ]
    # signal.signal(signal.SIGINT, signal_handler)
    # the learning_rate argument is unused here: each layer carries its own Adam optimizer
    model_FAndF = model.Model(learning_rate=0.001, dataset=None, layerContainer=layerContainer)
    pic = iml.ImageLoader.getOutputNpArray(example1, crop=True, crop_size=(0, 0, 150, 150))
    y = model_FAndF.compute(pic)
    print(y)
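
# The shape comments above (3x150x150 -> 32x74x74 -> 64x36x36 -> 128x17x17) follow from
# a 3x3 valid convolution with stride 1 followed by 2x2 max pooling with stride 2 at
# each stage. A minimal sketch that reproduces the arithmetic; the helper name is
# illustrative and not part of this codebase:
def _conv_pool_out(size, ksize=3, pool_size=2, pool_stride=2):
    conv_out = size - ksize + 1                        # valid convolution, stride 1
    return (conv_out - pool_size) // pool_stride + 1   # pooled output width/height

assert _conv_pool_out(150) == 74
assert _conv_pool_out(74) == 36
assert _conv_pool_out(36) == 17
assert 128 * 17 * 17 == 36992                          # the FC layer's input width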
def simple_adam_optimizer_test(self):
    train, test_data = dataloader.load_some_flowers(5, 0, crop_size=(0, 0, 150, 150))
    x = list(train)[0][0]
    c1 = conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(32, 3, 3, 3), stride_length=1,
                        pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(3, 150, 150))
    initFilters, initBiases = c1.getFiltersAndBiases()
    out = c1.compute(x, learn=True)
    # backpropagate a random delta of the layer's output shape (32, 74, 74)
    false_delta = numpy.random.randn(32, 74, 74)
    c1.learn(false_delta)
    c1.modify_weights(learning_rate=0.1, batch_size=1)
    finalFilters, finalBiases = c1.getFiltersAndBiases()
    # the Adam update must have moved the filters and biases away from their initial values
    self.assertFalse((initFilters == finalFilters).all())
    self.assertFalse((initBiases == finalBiases).all())
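
# The test above only checks that one update moves the weights. For reference, a
# minimal NumPy sketch of the standard Adam step; that adam.AdamConv implements
# exactly this rule is an assumption, since its internals are not shown in this file:
import numpy

def _adam_step(w, grad, m, v, t, lr=0.1, beta1=0.9, beta2=0.999, eps=1e-8):
    m = beta1 * m + (1 - beta1) * grad        # first-moment (mean) estimate
    v = beta2 * v + (1 - beta2) * grad ** 2   # second-moment (uncentered variance) estimate
    m_hat = m / (1 - beta1 ** t)              # bias correction; t is the 1-based step count
    v_hat = v / (1 - beta2 ** t)
    w = w - lr * m_hat / (numpy.sqrt(v_hat) + eps)
    return w, m, v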
def flowerAndFun(path=example1):
    input = iml.ImageLoader.getOutputNpArray(path, crop=True, crop_size=(0, 0, 150, 150))
    layerContainer = [
        # 3, 150, 150
        conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(32, 3, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(3, 150, 150)),
        # 32, 74, 74
        conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(64, 32, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(32, 74, 74)),
        # 64, 36, 36
        conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(128, 64, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(64, 36, 36)),
        # 128, 17, 17
        fcnetwork.FCLayer(optimizer=adam.AdamFC(), arch=[36992, 512, 128, 5])
    ]
    learning_rate = 0.0001
    model_FAndF = model.Model(learning_rate=learning_rate, dataset=None, layerContainer=layerContainer)
    # output = model_FAndF.compute(input, learn=True)
    # model_FAndF.soft_learn()
    model_FAndF.test_learn(epoch=50)
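
# Model.test_learn is the training entry point used throughout this file. A sketch of
# one training step built from the per-layer interface that simple_adam_optimizer_test
# exercises above (compute / learn / modify_weights). How Model.test_learn chains these
# internally is an assumption, as is learn() returning the delta for the preceding layer:
def _train_step(layers, x, delta, learning_rate=0.0001):
    for layer in layers:
        x = layer.compute(x, learn=True)   # forward pass, caching activations for backprop
    for layer in reversed(layers):
        delta = layer.learn(delta)         # backward pass (assumed return value)
    for layer in layers:
        layer.modify_weights(learning_rate=learning_rate, batch_size=1)
    return x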
def flowerAndFun2(path=example1):
    def signal_handler(sig, frame):
        # on Ctrl-C: run one forward pass, save every layer (dropout included), then exit
        # saveModel.saveLayers(["ff2c1", "ff2c2", "ff2c3", "ff2fcn1", "ff2softm"])
        pic = iml.ImageLoader.getOutputNpArray(example1, crop=True, crop_size=(0, 0, 150, 150))
        y = saveModel.compute(pic)
        saveModel.saveLayers([
            "ff2c1", "d1",
            "ff2c2", "d2",
            "ff2c3", "d3",
            "ff2fcn1", "d4",
            "ff2softm"
        ])
        print(y)
        sys.exit(0)

    input = iml.ImageLoader.getOutputNpArray(path, crop=True, crop_size=(0, 0, 150, 150))

    # fresh net (uncomment to start from random weights):
    # layerContainer = [
    #     # 3, 150, 150
    #     conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(32, 3, 3, 3), stride_length=1,
    #                    pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(3, 150, 150)),
    #     dropout.DropoutLayer(p=0.2, ishape=(32, 74, 74)),
    #     # 32, 74, 74
    #     conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(64, 32, 3, 3), stride_length=1,
    #                    pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(32, 74, 74)),
    #     dropout.DropoutLayer(p=0.2, ishape=(64, 36, 36)),
    #     # 64, 36, 36
    #     conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(128, 64, 3, 3), stride_length=1,
    #                    pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(64, 36, 36)),
    #     dropout.DropoutLayer(p=0.2, ishape=(128, 17, 17)),
    #     # 128, 17, 17
    #     fcnetwork.FCLayer(optimizer=adam.AdamFC(), arch=[36992, 512, 128], activation_func="relu", is_classifier=False),
    #     dropout.DropoutLayer(p=0.2, ishape=(128,)),
    #     softmax.SoftmaxLayer(optimizer=adam.AdamFC(), arch=[128, 5])
    # ]

    # load net
    layerContainer = [
        # 3, 150, 150
        conv.ConvLayer(optimizer=adam.AdamConv(), load_path="ff2c1", filtershape=(32, 3, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(3, 150, 150)),
        dropout.DropoutLayer(p=0.2, ishape=(32, 74, 74)),
        # 32, 74, 74
        conv.ConvLayer(optimizer=adam.AdamConv(), load_path="ff2c2", filtershape=(64, 32, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(32, 74, 74)),
        dropout.DropoutLayer(p=0.2, ishape=(64, 36, 36)),
        # 64, 36, 36
        conv.ConvLayer(optimizer=adam.AdamConv(), load_path="ff2c3", filtershape=(128, 64, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(64, 36, 36)),
        dropout.DropoutLayer(p=0.2, ishape=(128, 17, 17)),
        # 128, 17, 17
        fcnetwork.FCLayer(optimizer=adam.AdamFC(), load_path="ff2fcn1", arch=[36992, 512, 128],
                          activation_func="relu", is_classifier=False),
        dropout.DropoutLayer(p=0.2, ishape=(128,)),
        softmax.SoftmaxLayer(optimizer=adam.AdamFC(), load_path="ff2softm", arch=[128, 5])
    ]
    signal.signal(signal.SIGINT, signal_handler)
    # the learning_rate argument is unused here: each layer carries its own Adam optimizer
    model_FAndF = model.Model(learning_rate=0.001, dataset=None, layerContainer=layerContainer)
    saveModel = model_FAndF
    # saveModel.saveLayers(["ff2c1", "ff2c2", "ff2c3", "ff2fcn1", "ff2softm"])
    model_FAndF.test_learn(epoch=50)
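
# The SIGINT hook above lets a long training run be interrupted with Ctrl-C while still
# persisting the weights and printing one final prediction. The same pattern in
# isolation; the helper name is illustrative:
import signal
import sys

def _install_checkpoint_handler(save):
    def handler(sig, frame):
        save()          # persist whatever state matters (here: the layer weights)
        sys.exit(0)     # then exit cleanly
    signal.signal(signal.SIGINT, handler)

# usage: pass a closure that calls saveLayers(...) on the live model object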
def flowerAndFunModel(path=example1):
    # ImageLoader crops to 224x224 by default; here we pass an explicit 150x150 crop
    print(f"Model loads this picture as input: {path}")
    input = iml.ImageLoader.getOutputNpArray(path, crop=True, crop_size=(0, 0, 150, 150))
    dir = "./tensorfiles"
    if not os.path.exists(dir + "/" + "ff2c1" + ".ws1.npy"):
        # no saved weights yet: build a fresh net once and persist it
        layerContainer = [
            # 3, 150, 150
            conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(32, 3, 3, 3), stride_length=1,
                           pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(3, 150, 150)),
            # 32, 74, 74
            conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(64, 32, 3, 3), stride_length=1,
                           pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(32, 74, 74)),
            # 64, 36, 36
            conv.ConvLayer(optimizer=adam.AdamConv(), filtershape=(128, 64, 3, 3), stride_length=1,
                           pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(64, 36, 36)),
            # 128, 17, 17
            fcnetwork.FCLayer(optimizer=adam.AdamFC(), arch=[36992, 512, 128],
                              activation_func="relu", is_classifier=False),
            softmax.SoftmaxLayer(optimizer=adam.AdamFC(), arch=[128, 5])
        ]
        ffM = model.Model(learning_rate=None, dataset=None, layerContainer=layerContainer)
        ffM.saveLayers(["ff2c1", "ff2c2", "ff2c3", "ff2fcn1", "ff2softm"])
    # reload the persisted weights
    layerContainer = [
        conv.ConvLayer(optimizer=adam.AdamConv(), load_path="ff2c1", filtershape=(32, 3, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(3, 150, 150)),
        # 32, 74, 74
        conv.ConvLayer(optimizer=adam.AdamConv(), load_path="ff2c2", filtershape=(64, 32, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(32, 74, 74)),
        # 64, 36, 36
        conv.ConvLayer(optimizer=adam.AdamConv(), load_path="ff2c3", filtershape=(128, 64, 3, 3), stride_length=1,
                       pool=pool.PoolLayer(pool_size=(2, 2), stride_length=2), ishape=(64, 36, 36)),
        # 128, 17, 17
        fcnetwork.FCLayer(optimizer=adam.AdamFC(), load_path="ff2fcn1", arch=[36992, 512, 128],
                          activation_func="relu", is_classifier=False),
        softmax.SoftmaxLayer(optimizer=adam.AdamFC(), load_path="ff2softm", arch=[128, 5])
    ]
    ffM = model.Model(learning_rate=None, dataset=None, layerContainer=layerContainer)
    try:
        output = ffM.compute(input)
    except Exception:
        print("error occurred in flowerAndFun model")
        return "error"
    return return_response(output)


# print(zf5model())
# overfeat()
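
# flowerAndFunModel initializes weights once, then reloads them on every later call by
# probing for one of the saved tensor files. The same create-if-missing pattern in
# isolation; the file name and shape below are illustrative, not the model's real files:
import os
import numpy

def _weights(path="./tensorfiles/demo.npy", shape=(4, 4)):
    if not os.path.exists(path):
        os.makedirs(os.path.dirname(path), exist_ok=True)
        numpy.save(path, numpy.random.randn(*shape))   # first run: create and persist
    return numpy.load(path)                            # later runs: load from disk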