# Imports these snippets appear to assume (TFLearn on TensorFlow 1.x);
# pandas/sklearn are only needed by the evaluation variant of _model5() below.
import tensorflow as tf
import pandas as pd
from sklearn import metrics
from sklearn.metrics import accuracy_score

import tflearn
from tflearn import activations
from tflearn.activations import relu
from tflearn.layers.core import input_data, dropout, flatten, fully_connected
from tflearn.layers.conv import conv_2d, max_pool_2d, avg_pool_2d
from tflearn.layers.normalization import batch_normalization, local_response_normalization
from tflearn.layers.merge_ops import merge
from tflearn.layers.estimator import regression
from tflearn.data_preprocessing import ImagePreprocessing
from tflearn.data_augmentation import ImageAugmentation
from tflearn.utils import repeat


def ImageNetInceptionV2(outnode, model_name, target, opt, learn_r, epch,
                        dropout_keep_rate, save_model=False):
    """Inception-ResNet-v2 classifier for single-channel inputs.

    Assumes a module-level IMG_SIZE constant for the input resolution.
    """

    def block35(net, scale=1.0, activation="relu"):
        # 35x35 residual block (Inception-ResNet-A).
        tower_conv = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 32, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 48, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 64, 3, bias=False, activation=None, name='Conv2d_0c_3x3')))
        tower_mixed = merge([tower_conv, tower_conv1_1, tower_conv2_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block17(net, scale=1.0, activation="relu"):
        # 17x17 residual block (Inception-ResNet-B).
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv_1_0 = relu(batch_normalization(conv_2d(net, 128, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv_1_1 = relu(batch_normalization(conv_2d(tower_conv_1_0, 160, [1, 7], bias=False, activation=None, name='Conv2d_0b_1x7')))
        tower_conv_1_2 = relu(batch_normalization(conv_2d(tower_conv_1_1, 192, [7, 1], bias=False, activation=None, name='Conv2d_0c_7x1')))
        tower_mixed = merge([tower_conv, tower_conv_1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block8(net, scale=1.0, activation="relu"):
        # 8x8 residual block (Inception-ResNet-C).
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 224, [1, 3], bias=False, activation=None, name='Conv2d_0b_1x3')))
        tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 256, [3, 1], bias=False, activation=None, name='Conv2d_0c_3x1')))
        tower_mixed = merge([tower_conv, tower_conv1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    # default = 0.8
    dropout_keep_prob = dropout_keep_rate

    # Stem.
    network = input_data(shape=[None, IMG_SIZE, IMG_SIZE, 1], name='input')
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID', activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID', activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID', activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')

    # Mixed 5b.
    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))
    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0c_3x3')))
    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None, name='Conv2d_5b_b3_0b_1x1')))
    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)

    # Reduction 6a.
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 20, block17, scale=0.1)

    # Reduction 7a.
    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    # Head.
    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, outnode, activation='softmax')

    str_model_name = "{}_{}_{}_{}_{}_{}".format(model_name, target, opt, learn_r, epch, dropout_keep_rate)
    network = tflearn.regression(loss, optimizer=opt, loss='categorical_crossentropy',
                                 learning_rate=learn_r, name='targets')
    if save_model:
        model = tflearn.DNN(
            network,
            checkpoint_path='../tflearnModels/{}'.format(str_model_name),
            best_checkpoint_path='../tflearnModels/bestModels/best_{}'.format(str_model_name),
            max_checkpoints=1,
            tensorboard_verbose=0,
            tensorboard_dir="../tflearnLogs/{}/".format(str_model_name))
    else:
        model = tflearn.DNN(network)
    return model
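
# A minimal smoke test for ImageNetInceptionV2 on random data. This is a sketch:
# IMG_SIZE and every hyperparameter value below are assumptions, not taken from
# the original code.
def _demo_imagenet_inception_v2():
    import numpy as np
    global IMG_SIZE
    IMG_SIZE = 139  # smallest size that survives all the VALID-padded reductions
    x = np.random.rand(8, IMG_SIZE, IMG_SIZE, 1).astype('float32')
    y = np.eye(2)[np.random.randint(0, 2, size=8)]  # 8 one-hot labels, 2 classes
    model = ImageNetInceptionV2(outnode=2, model_name='incresv2', target='demo',
                                opt='adam', learn_r=1e-3, epch=1,
                                dropout_keep_rate=0.8, save_model=False)
    model.fit(x, y, n_epoch=1, batch_size=4, show_metric=True, run_id='incresv2_smoke')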
def inception_v2(width, height, learning_rate):
    """Inception-ResNet-v2 for RGB inputs with 17 output classes.

    Assumes block35 / block17 / block8 are defined at module scope (e.g. the
    versions nested inside ImageNetInceptionV2 above, hoisted out).
    """
    num_classes = 17
    dropout_keep_prob = 0.8

    # Stem.
    network = input_data(shape=[None, width, height, 3])
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID', activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID', activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID', activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')

    # Mixed 5b.
    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))
    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0c_3x3')))
    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None, name='Conv2d_5b_b3_0b_1x1')))
    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)

    # Reduction 6a.
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 20, block17, scale=0.1)

    # Reduction 7a.
    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    # Head.
    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes, activation='softmax')

    network = tflearn.regression(loss, optimizer='RMSprop', loss='categorical_crossentropy',
                                 learning_rate=learning_rate)  # was: learning_rate=0.0001
    model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2', max_checkpoints=1,
                        tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")
    return model
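
# Hypothetical driver for inception_v2: num_classes=17 matches Oxford's
# 17-category flowers set, so TFLearn's bundled loader fits naturally. The
# resolution and training settings below are assumptions, and the block35/
# block17/block8 helpers must be at module scope before the graph is built.
def _demo_inception_v2():
    import tflearn.datasets.oxflower17 as oxflower17
    X, Y = oxflower17.load_data(one_hot=True, resize_pics=(299, 299))
    model = inception_v2(299, 299, learning_rate=0.0001)
    model.fit(X, Y, n_epoch=10, validation_set=0.1, shuffle=True, show_metric=True,
              batch_size=32, snapshot_epoch=False, run_id='inception_v2_17flowers')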
def inception_v3(width, height, frame_count, lr, output=9, model_name='sentnet_color.model'):
    """GoogLeNet-style inception classifier with a residual repeat after 4a.

    Despite the name, the layer layout follows Inception-v1 (GoogLeNet).
    frame_count and model_name are accepted but unused here, and block_1
    (used by the repeat below) must be defined at module scope.
    """
    network = input_data(shape=[None, width, height, 3], name='input')
    conv1_7_7 = conv_2d(network, 64, 7, strides=2, activation='relu', name='conv1_7_7_s2')
    pool1_3_3 = max_pool_2d(conv1_7_7, 3, strides=2)
    pool1_3_3 = local_response_normalization(pool1_3_3)
    conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu', name='conv2_3_3_reduce')
    conv2_3_3 = conv_2d(conv2_3_3_reduce, 192, 3, activation='relu', name='conv2_3_3')
    conv2_3_3 = local_response_normalization(conv2_3_3)
    pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=3, strides=2, name='pool2_3_3_s2')

    inception_3a_1_1 = conv_2d(pool2_3_3, 64, 1, activation='relu', name='inception_3a_1_1')
    inception_3a_3_3_reduce = conv_2d(pool2_3_3, 96, 1, activation='relu', name='inception_3a_3_3_reduce')
    inception_3a_3_3 = conv_2d(inception_3a_3_3_reduce, 128, filter_size=3, activation='relu', name='inception_3a_3_3')
    inception_3a_5_5_reduce = conv_2d(pool2_3_3, 16, filter_size=1, activation='relu', name='inception_3a_5_5_reduce')
    inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce, 32, filter_size=5, activation='relu', name='inception_3a_5_5')
    inception_3a_pool = max_pool_2d(pool2_3_3, kernel_size=3, strides=1)
    inception_3a_pool_1_1 = conv_2d(inception_3a_pool, 32, filter_size=1, activation='relu', name='inception_3a_pool_1_1')
    # merge the inception_3a_*
    inception_3a_output = merge([inception_3a_1_1, inception_3a_3_3, inception_3a_5_5, inception_3a_pool_1_1],
                                mode='concat', axis=3)

    inception_3b_1_1 = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_1_1')
    inception_3b_3_3_reduce = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_3_3_reduce')
    inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce, 192, filter_size=3, activation='relu', name='inception_3b_3_3')
    inception_3b_5_5_reduce = conv_2d(inception_3a_output, 32, filter_size=1, activation='relu', name='inception_3b_5_5_reduce')
    inception_3b_5_5 = conv_2d(inception_3b_5_5_reduce, 96, filter_size=5, activation='relu', name='inception_3b_5_5')
    inception_3b_pool = max_pool_2d(inception_3a_output, kernel_size=3, strides=1, name='inception_3b_pool')
    inception_3b_pool_1_1 = conv_2d(inception_3b_pool, 64, filter_size=1, activation='relu', name='inception_3b_pool_1_1')
    # merge the inception_3b_*
    inception_3b_output = merge([inception_3b_1_1, inception_3b_3_3, inception_3b_5_5, inception_3b_pool_1_1],
                                mode='concat', axis=3, name='inception_3b_output')

    pool3_3_3 = max_pool_2d(inception_3b_output, kernel_size=3, strides=2, name='pool3_3_3')

    inception_4a_1_1 = conv_2d(pool3_3_3, 192, filter_size=1, activation='relu', name='inception_4a_1_1')
    inception_4a_3_3_reduce = conv_2d(pool3_3_3, 96, filter_size=1, activation='relu', name='inception_4a_3_3_reduce')
    inception_4a_3_3 = conv_2d(inception_4a_3_3_reduce, 208, filter_size=3, activation='relu', name='inception_4a_3_3')
    inception_4a_5_5_reduce = conv_2d(pool3_3_3, 16, filter_size=1, activation='relu', name='inception_4a_5_5_reduce')
    inception_4a_5_5 = conv_2d(inception_4a_5_5_reduce, 48, filter_size=5, activation='relu', name='inception_4a_5_5')
    inception_4a_pool = max_pool_2d(pool3_3_3, kernel_size=3, strides=1, name='inception_4a_pool')
    inception_4a_pool_1_1 = conv_2d(inception_4a_pool, 64, filter_size=1, activation='relu', name='inception_4a_pool_1_1')
    inception_4a_output = merge([inception_4a_1_1, inception_4a_3_3, inception_4a_5_5, inception_4a_pool_1_1],
                                mode='concat', axis=3, name='inception_4a_output')

    # ************************************************************************ #
    inception_4a_output = repeat(inception_4a_output, 10, block_1, scale=0.15)
    # ************************************************************************ #

    inception_4b_1_1 = conv_2d(inception_4a_output, 160, filter_size=1, activation='relu', name='inception_4b_1_1')
    inception_4b_3_3_reduce = conv_2d(inception_4a_output, 112, filter_size=1, activation='relu', name='inception_4b_3_3_reduce')
    inception_4b_3_3 = conv_2d(inception_4b_3_3_reduce, 224, filter_size=3, activation='relu', name='inception_4b_3_3')
    inception_4b_5_5_reduce = conv_2d(inception_4a_output, 24, filter_size=1, activation='relu', name='inception_4b_5_5_reduce')
    inception_4b_5_5 = conv_2d(inception_4b_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4b_5_5')
    inception_4b_pool = max_pool_2d(inception_4a_output, kernel_size=3, strides=1, name='inception_4b_pool')
    inception_4b_pool_1_1 = conv_2d(inception_4b_pool, 64, filter_size=1, activation='relu', name='inception_4b_pool_1_1')
    inception_4b_output = merge([inception_4b_1_1, inception_4b_3_3, inception_4b_5_5, inception_4b_pool_1_1],
                                mode='concat', axis=3, name='inception_4b_output')

    inception_4c_1_1 = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_1_1')
    inception_4c_3_3_reduce = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_3_3_reduce')
    inception_4c_3_3 = conv_2d(inception_4c_3_3_reduce, 256, filter_size=3, activation='relu', name='inception_4c_3_3')
    inception_4c_5_5_reduce = conv_2d(inception_4b_output, 24, filter_size=1, activation='relu', name='inception_4c_5_5_reduce')
    inception_4c_5_5 = conv_2d(inception_4c_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4c_5_5')
    inception_4c_pool = max_pool_2d(inception_4b_output, kernel_size=3, strides=1)
    inception_4c_pool_1_1 = conv_2d(inception_4c_pool, 64, filter_size=1, activation='relu', name='inception_4c_pool_1_1')
    inception_4c_output = merge([inception_4c_1_1, inception_4c_3_3, inception_4c_5_5, inception_4c_pool_1_1],
                                mode='concat', axis=3, name='inception_4c_output')

    inception_4d_1_1 = conv_2d(inception_4c_output, 112, filter_size=1, activation='relu', name='inception_4d_1_1')
    inception_4d_3_3_reduce = conv_2d(inception_4c_output, 144, filter_size=1, activation='relu', name='inception_4d_3_3_reduce')
    inception_4d_3_3 = conv_2d(inception_4d_3_3_reduce, 288, filter_size=3, activation='relu', name='inception_4d_3_3')
    inception_4d_5_5_reduce = conv_2d(inception_4c_output, 32, filter_size=1, activation='relu', name='inception_4d_5_5_reduce')
    inception_4d_5_5 = conv_2d(inception_4d_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4d_5_5')
    inception_4d_pool = max_pool_2d(inception_4c_output, kernel_size=3, strides=1, name='inception_4d_pool')
    inception_4d_pool_1_1 = conv_2d(inception_4d_pool, 64, filter_size=1, activation='relu', name='inception_4d_pool_1_1')
    inception_4d_output = merge([inception_4d_1_1, inception_4d_3_3, inception_4d_5_5, inception_4d_pool_1_1],
                                mode='concat', axis=3, name='inception_4d_output')

    inception_4e_1_1 = conv_2d(inception_4d_output, 256, filter_size=1, activation='relu', name='inception_4e_1_1')
    inception_4e_3_3_reduce = conv_2d(inception_4d_output, 160, filter_size=1, activation='relu', name='inception_4e_3_3_reduce')
    inception_4e_3_3 = conv_2d(inception_4e_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_4e_3_3')
    inception_4e_5_5_reduce = conv_2d(inception_4d_output, 32, filter_size=1, activation='relu', name='inception_4e_5_5_reduce')
    inception_4e_5_5 = conv_2d(inception_4e_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_4e_5_5')
    inception_4e_pool = max_pool_2d(inception_4d_output, kernel_size=3, strides=1, name='inception_4e_pool')
    inception_4e_pool_1_1 = conv_2d(inception_4e_pool, 128, filter_size=1, activation='relu', name='inception_4e_pool_1_1')
    inception_4e_output = merge([inception_4e_1_1, inception_4e_3_3, inception_4e_5_5, inception_4e_pool_1_1],
                                axis=3, mode='concat')

    pool4_3_3 = max_pool_2d(inception_4e_output, kernel_size=3, strides=2, name='pool_3_3')

    inception_5a_1_1 = conv_2d(pool4_3_3, 256, filter_size=1, activation='relu', name='inception_5a_1_1')
    inception_5a_3_3_reduce = conv_2d(pool4_3_3, 160, filter_size=1, activation='relu', name='inception_5a_3_3_reduce')
    inception_5a_3_3 = conv_2d(inception_5a_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_5a_3_3')
    inception_5a_5_5_reduce = conv_2d(pool4_3_3, 32, filter_size=1, activation='relu', name='inception_5a_5_5_reduce')
    inception_5a_5_5 = conv_2d(inception_5a_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5a_5_5')
    inception_5a_pool = max_pool_2d(pool4_3_3, kernel_size=3, strides=1, name='inception_5a_pool')
    inception_5a_pool_1_1 = conv_2d(inception_5a_pool, 128, filter_size=1, activation='relu', name='inception_5a_pool_1_1')
    inception_5a_output = merge([inception_5a_1_1, inception_5a_3_3, inception_5a_5_5, inception_5a_pool_1_1],
                                axis=3, mode='concat')

    inception_5b_1_1 = conv_2d(inception_5a_output, 384, filter_size=1, activation='relu', name='inception_5b_1_1')
    inception_5b_3_3_reduce = conv_2d(inception_5a_output, 192, filter_size=1, activation='relu', name='inception_5b_3_3_reduce')
    inception_5b_3_3 = conv_2d(inception_5b_3_3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3_3')
    inception_5b_5_5_reduce = conv_2d(inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5_5_reduce')
    inception_5b_5_5 = conv_2d(inception_5b_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5_5')
    inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool')
    inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1')
    inception_5b_output = merge([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1],
                                axis=3, mode='concat')

    pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1)
    pool5_7_7 = dropout(pool5_7_7, 0.4)
    loss = fully_connected(pool5_7_7, output, activation='softmax')
    network = regression(loss, optimizer='momentum', loss='categorical_crossentropy',
                         learning_rate=lr, name='targets')
    model = tflearn.DNN(network, max_checkpoints=0, tensorboard_verbose=0, tensorboard_dir='log')
    return model
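
# Illustrative smoke test for inception_v3. All values here are assumptions;
# block_1 is stubbed with a pass-through placeholder so the repeat() call can
# build (the real residual block is defined elsewhere in the original project).
def _demo_inception_v3():
    import numpy as np
    global block_1
    block_1 = lambda net, scale=1.0: net  # placeholder residual block (assumption)
    X = np.random.rand(4, 160, 120, 3).astype('float32')
    Y = np.eye(9)[np.random.randint(0, 9, size=4)]  # one-hot labels for 9 classes
    model = inception_v3(160, 120, frame_count=1, lr=1e-3, output=9)
    model.fit({'input': X}, {'targets': Y}, n_epoch=1, show_metric=True)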
def network(img_shape, name, LR):
    """Inception-ResNet-v2 regression graph with real-time preprocessing and
    augmentation. Assumes module-level block35 / block17 / block8 plus
    num_classes and dropout_keep_prob."""
    # Real-time data preprocessing.
    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()

    # Real-time data augmentation.
    img_aug = ImageAugmentation()
    img_aug.add_random_blur(sigma_max=3.0)
    img_aug.add_random_flip_leftright()
    img_aug.add_random_flip_updown()
    img_aug.add_random_90degrees_rotation(rotations=[0, 2])

    network = input_data(shape=img_shape, name=name,
                         data_preprocessing=img_prep, data_augmentation=img_aug)

    # Stem.
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID', activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID', activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID', activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')

    # Mixed 5b.
    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))
    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0c_3x3')))
    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None, name='Conv2d_5b_b3_0b_1x1')))
    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)

    # Reduction 6a.
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 20, block17, scale=0.1)

    # Reduction 7a; the 3x3/stride-2 originals are kept as comments above their
    # 1x1 stand-ins (kernels reduced, presumably to fit smaller inputs).
    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    # tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    # tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
    # tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    # tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    # Head.
    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)  # dropout_keep_prob assumed defined at module level
    loss = fully_connected(net, num_classes, activation='softmax')  # num_classes likewise
    network = tflearn.regression(loss, optimizer='RMSprop', loss='categorical_crossentropy',
                                 learning_rate=LR,  # was hard-coded to 0.0001
                                 name='targets')
    return network
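
# network() returns only the regression graph; a caller (illustrative, with the
# assumed module-level num_classes / dropout_keep_prob and block definitions in
# place) would wrap it in a DNN before training:
def _demo_network():
    net = network(img_shape=[None, 299, 299, 3], name='input', LR=1e-4)
    model = tflearn.DNN(net, tensorboard_verbose=0)
    return model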
def _model5():
    """Training variant: builds a reduced Inception-ResNet-v2, trains it and
    optionally saves it. Reads X, Y, xTest, yTest, inputSize, dim, epochNum,
    batchNum, modelStore, _id and img_aug from module globals."""
    global yTest, img_aug
    tf.reset_default_graph()

    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()

    def block35(net, scale=1.0, activation="relu"):
        # 35x35 residual block (Inception-ResNet-A).
        tower_conv = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 32, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 48, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 64, 3, bias=False, activation=None, name='Conv2d_0c_3x3')))
        tower_mixed = merge([tower_conv, tower_conv1_1, tower_conv2_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block17(net, scale=1.0, activation="relu"):
        # 17x17 residual block (Inception-ResNet-B).
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv_1_0 = relu(batch_normalization(conv_2d(net, 128, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv_1_1 = relu(batch_normalization(conv_2d(tower_conv_1_0, 160, [1, 7], bias=False, activation=None, name='Conv2d_0b_1x7')))
        tower_conv_1_2 = relu(batch_normalization(conv_2d(tower_conv_1_1, 192, [7, 1], bias=False, activation=None, name='Conv2d_0c_7x1')))
        tower_mixed = merge([tower_conv, tower_conv_1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block8(net, scale=1.0, activation="relu"):
        # 8x8 residual block (Inception-ResNet-C).
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 224, [1, 3], bias=False, activation=None, name='Conv2d_0b_1x3')))
        tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 256, [3, 1], bias=False, activation=None, name='Conv2d_0c_3x1')))
        tower_mixed = merge([tower_conv, tower_conv1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    num_classes = len(Y[0])
    dropout_keep_prob = 0.8

    # Stem.
    network = input_data(shape=[None, inputSize, inputSize, dim], name='input',
                         data_preprocessing=img_prep, data_augmentation=img_aug)
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID', activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID', activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID', activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')

    # Mixed 5b.
    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))
    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0c_3x3')))
    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None, name='Conv2d_5b_b3_0b_1x1')))
    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)

    # Original full-kernel reductions, disabled in favor of the 1x1 variants below:
    '''
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)
    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    '''

    # 1x1 stand-ins for the disabled 3x3/stride-2 reductions above (kernel sizes
    # reduced, presumably for smaller inputs; the layer names still say 3x3).
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 1, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 20, block17, scale=0.1)

    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 1, bias=False, activation=None, name='Conv2d_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    ####
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    # Head.
    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes, activation='softmax')

    network = tflearn.regression(loss, optimizer='RMSprop', loss='categorical_crossentropy', learning_rate=0.0001)
    model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2', max_checkpoints=1,
                        tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")
    model.fit(X, Y, n_epoch=epochNum, validation_set=(xTest, yTest), shuffle=True,
              show_metric=True, batch_size=batchNum, snapshot_step=2000,
              snapshot_epoch=False, run_id='inception_resnet_v2_oxflowers17')
    if modelStore:
        model.save(_id + '-model.tflearn')
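
# _model5() reads its data and settings from module globals; an illustrative
# (entirely assumed) setup before calling the training variant:
def _demo_model5_train():
    global inputSize, dim, epochNum, batchNum, modelStore, _id, img_aug
    inputSize, dim = 139, 3            # input resolution / channels (assumption)
    epochNum, batchNum = 20, 32        # epochs and batch size (assumption)
    modelStore, _id = False, 'run01'
    img_aug = ImageAugmentation()
    # X, Y, xTest, yTest must already hold arrays shaped
    # [N, inputSize, inputSize, dim] with one-hot labels before this call.
    _model5()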
def _model5():
    """Evaluation variant of _model5: builds the same reduced graph, loads a
    trained checkpoint from _path and reports metrics. If both variants live in
    one module, this later definition shadows the training one. Reads xTest,
    yTest, inputSize, dim, _path, _CSV, _wrFile and img_aug from module globals
    and relies on convert2 / convert3 / makeCSV / writeTest helpers defined
    elsewhere."""
    global yTest, img_aug
    tf.reset_default_graph()

    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()

    def block35(net, scale=1.0, activation="relu"):
        # 35x35 residual block (Inception-ResNet-A).
        tower_conv = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 32, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 48, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 64, 3, bias=False, activation=None, name='Conv2d_0c_3x3')))
        tower_mixed = merge([tower_conv, tower_conv1_1, tower_conv2_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block17(net, scale=1.0, activation="relu"):
        # 17x17 residual block (Inception-ResNet-B).
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv_1_0 = relu(batch_normalization(conv_2d(net, 128, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv_1_1 = relu(batch_normalization(conv_2d(tower_conv_1_0, 160, [1, 7], bias=False, activation=None, name='Conv2d_0b_1x7')))
        tower_conv_1_2 = relu(batch_normalization(conv_2d(tower_conv_1_1, 192, [7, 1], bias=False, activation=None, name='Conv2d_0c_7x1')))
        tower_mixed = merge([tower_conv, tower_conv_1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block8(net, scale=1.0, activation="relu"):
        # 8x8 residual block (Inception-ResNet-C).
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 224, [1, 3], bias=False, activation=None, name='Conv2d_0b_1x3')))
        tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 256, [3, 1], bias=False, activation=None, name='Conv2d_0c_3x1')))
        tower_mixed = merge([tower_conv, tower_conv1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    num_classes = len(yTest[0])
    dropout_keep_prob = 0.8

    # Stem.
    network = input_data(shape=[None, inputSize, inputSize, dim], name='input',
                         data_preprocessing=img_prep, data_augmentation=img_aug)
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID', activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID', activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID', activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')

    # Mixed 5b.
    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))
    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0c_3x3')))
    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None, name='Conv2d_5b_b3_0b_1x1')))
    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)

    # Original full-kernel reductions, disabled in favor of the 1x1 variants below:
    '''
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)
    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    '''

    # 1x1 stand-ins for the disabled 3x3/stride-2 reductions above; must match
    # the training variant so the checkpoint weights load cleanly.
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 1, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 20, block17, scale=0.1)

    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 1, bias=False, activation=None, name='Conv2d_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    ####
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    # Head.
    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes, activation='softmax')

    network = tflearn.regression(loss, optimizer='RMSprop', loss='categorical_crossentropy', learning_rate=0.0001)
    model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2', max_checkpoints=1,
                        tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")

    # Load the trained weights and evaluate on the held-out set.
    model.load(_path)
    pred = model.predict(xTest)

    df = pd.DataFrame(pred)
    df.to_csv(_path + ".csv")

    newList = pred.copy()
    newList = convert2(newList)
    if _CSV:
        makeCSV(newList)
    pred = convert2(pred)
    pred = convert3(pred)
    yTest = convert3(yTest)
    print(metrics.confusion_matrix(yTest, pred))
    print(metrics.classification_report(yTest, pred))
    print('Accuracy', accuracy_score(yTest, pred))
    print()
    if _wrFile:
        writeTest(pred)
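
# The evaluation variant additionally needs a trained checkpoint plus the
# convert2 / convert3 / makeCSV / writeTest helpers defined elsewhere; an
# illustrative (assumed) invocation:
def _demo_model5_eval():
    global _path, _CSV, _wrFile
    _path = 'inception_resnet_v2-2000'  # assumed checkpoint prefix from training
    _CSV, _wrFile = False, False
    # xTest / yTest must already be populated, matching the training shapes.
    _model5()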