Example #1
def get_network():
    # input images
    network = input_data(shape=[None, 33, 33, 4], name='input')

    # local pathway [conv + pool + norm]
    #path1 = conv_2d(network, 32, 7, activation='relu', regularizer="L2", padding='valid')
    path1 = conv_2d(network, 64, 7, activation='relu', regularizer="L2", padding='valid')
    path1 = max_pool_2d(path1, 4, 1, padding='valid')
    path1 = dropout(path1, 0.5)
    #path1 = local_response_normalization(path1)

    #path1 = conv_2d(path1, 32, 3, activation='relu', regularizer="L2", padding='valid')
    path1 = conv_2d(path1, 64, 3, activation='relu', regularizer="L2", padding='valid')
    path1 = max_pool_2d(path1, 2, 1, padding='valid')
    path1 = dropout(path1, 0.5)
    #path1 = local_response_normalization(path1)

    # global pathway
    #path2 = conv_2d(network, 80, 13, activation='relu', regularizer="L2", padding='valid')
    path2 = conv_2d(network, 160, 13, activation='relu', regularizer="L2", padding='valid')
    path2 = dropout(path2, 0.5)
    #path2 = local_response_normalization(path2)

    network = merge([path1, path2], 'concat', axis=3)

    network = conv_2d(network, 5, 21, activation='relu', regularizer="L2")
    network = flatten(network, name="flatten")

    # softmax + output layers
    network = fully_connected(network, 5, activation='softmax', name='soft')
    network = regression(network, optimizer='adam', learning_rate=0.00005,  # 0.0001
                         loss='categorical_crossentropy', name='target', batch_size=500)
    return network
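The function above only defines the graph; a minimal training sketch, assuming TFLearn's standard DNN wrapper and arrays X (shape [n, 33, 33, 4]) and Y (one-hot, 5 classes) loaded elsewhere:

import tflearn

network = get_network()
model = tflearn.DNN(network, tensorboard_verbose=0)
# feed dictionaries are keyed by the layer names given above
model.fit({'input': X}, {'target': Y}, n_epoch=10,
          validation_set=0.1, show_metric=True, run_id='two_pathway_cnn')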
Example #2
def neural_network_model4(IMG_SIZE, LR):
    convnet = input_data(shape=[None, IMG_SIZE, IMG_SIZE, 1], name='input')

    convnet = conv_2d(convnet, 64, (5, 5), activation='relu')
    convnet = max_pool_2d(convnet, (5, 5), strides=(2, 2))

    convnet = conv_2d(convnet, 64, (3, 3), activation='relu')
    convnet = conv_2d(convnet, 64, (3, 3), activation='relu')
    convnet = avg_pool_2d(convnet, (3, 3), strides=(2, 2))
    convnet = dropout(convnet, 0.7)

    convnet = conv_2d(convnet, 128, (3, 3), activation='relu')
    convnet = conv_2d(convnet, 128, (3, 3), activation='relu')
    convnet = avg_pool_2d(convnet, (3, 3), strides=(2, 2))
    convnet = dropout(convnet, 0.7)

    convnet = flatten(convnet)

    convnet = fully_connected(convnet, 1024, activation='relu')
    convnet = dropout(convnet, 0.7)

    convnet = fully_connected(convnet, 1024, activation='relu')
    convnet = dropout(convnet, 0.7)

    convnet = fully_connected(convnet, 7, activation='softmax')
    convnet = regression(convnet,
                         optimizer='adam',
                         learning_rate=LR,
                         loss='categorical_crossentropy',
                         name='targets')

    model = tflearn.DNN(convnet, tensorboard_dir='log')
    return model
Example #3
def create_model():
    print("Building CNN")

    network = input_data(shape=[None, 48, 48, 1])
    #print("Input Data",network.shape[1:])
    network = conv_2d(network, 32, 3, padding='SAME', activation='relu')
    network = max_pool_2d(network, 2, strides=2, padding='SAME')

    network = conv_2d(network, 64, 3, padding='SAME', activation='relu')
    network = max_pool_2d(network, 2, strides=2, padding='SAME')

    network = conv_2d(network, 64, 3, padding='SAME', activation='relu')
    network = max_pool_2d(network, 2, strides=2, padding='SAME')

    network = conv_2d(network, 128, 3, padding='SAME', activation='relu')
    network = flatten(network)
    network = fully_connected(network, 3072, activation='tanh')
    network = dropout(network, 0.5)
    network = fully_connected(network, 4096, activation='tanh')
    network = dropout(network, 0.5)
    network = fully_connected(network, 7, activation='softmax')
    network = regression(network,
                         optimizer='adam',
                         loss='categorical_crossentropy',
                         learning_rate=0.001)
    print('MODEL CREATED ....')
    return network
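create_model returns the graph rather than a trained model; a minimal sketch of wrapping it for training, assuming 48x48 grayscale inputs X and 7-class one-hot labels Y:

model = tflearn.DNN(create_model(), tensorboard_verbose=0)
model.fit(X, Y, n_epoch=10, validation_set=0.1, show_metric=True)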
Example #4
def build_model(x, keep_prob, conv_ksize, conv_strides, pool_ksize,
                pool_strides):
    # First convolution & maxpooling
    layer = conv_maxpool(x, 64, conv_ksize, conv_strides, pool_ksize,
                         pool_strides)

    # Second convolution & maxpooling
    layer = conv_maxpool(layer, 64, conv_ksize, conv_strides, pool_ksize,
                         pool_strides)

    # Third convolution & maxpooling (disabled)
    # layer = conv_maxpool(layer, 1024, conv_ksize, conv_strides, pool_ksize, pool_strides)

    # Flatten Layer
    layer = flatten(layer)

    # Fully connected layers
    layer_output = fully_connected(layer,
                                   1024,
                                   activation='relu',
                                   weights_init='truncated_normal',
                                   bias_init='truncated_normal')
    layer_output = dropout(layer_output, keep_prob)
    layer_output = fully_connected(layer_output,
                                   512,
                                   activation='relu',
                                   weights_init='truncated_normal',
                                   bias_init='truncated_normal')
    #     layer_output = dropout(layer_output, keep_prob)
    layer_output = fully_connected(layer_output,
                                   10,
                                   weights_init='truncated_normal',
                                   bias_init='truncated_normal')

    return layer_output
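conv_maxpool is not defined in this snippet; a plausible minimal implementation, assuming it chains a TFLearn conv_2d and max_pool_2d in the argument order used above (hypothetical helper):

from tflearn.layers.conv import conv_2d, max_pool_2d

def conv_maxpool(incoming, num_outputs, conv_ksize, conv_strides,
                 pool_ksize, pool_strides):
    # convolution + ReLU, then max-pooling
    layer = conv_2d(incoming, num_outputs, conv_ksize,
                    strides=conv_strides, activation='relu')
    return max_pool_2d(layer, pool_ksize, strides=pool_strides)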
Example #5
def tflearn_model():
    network = input_data(shape=[None, 200, 66, 3], name='input')
    network = batch_normalization(network, epsilon=0.001)
    network = conv_2d(network, 24, 5, strides=2, activation='relu', padding='valid')
    network = batch_normalization(network)
    network = conv_2d(network, 36, 5, strides=2, activation='relu', padding='valid')
    network = batch_normalization(network)
    network = conv_2d(network, 48, 5, strides=2, activation='relu', padding='valid')
    network = batch_normalization(network)
    network = conv_2d(network, 64, 3, strides=1, activation='relu', padding='valid')
    network = batch_normalization(network)
    network = conv_2d(network, 64, 3, strides=1, activation='relu', padding='valid')
    network = batch_normalization(network)
    network = flatten(network)
    network = fully_connected(network, 1164, activation='relu')
    network = batch_normalization(network)
    network = fully_connected(network, 100, activation='relu')
    network = batch_normalization(network)
    network = fully_connected(network, 50, activation='relu')
    network = batch_normalization(network)
    network = fully_connected(network, 10, activation='relu')
    network = batch_normalization(network)
    network = fully_connected(network, 2, activation='tanh')
    network = regression(network, optimizer='adam', loss='mean_square', name='targets')
    model = tflearn.DNN(network, checkpoint_path='nvidia_model', max_checkpoints=1,
                        tensorboard_verbose=0, tensorboard_dir='tflog')

    return model
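A minimal training sketch for this NVIDIA-style steering model, assuming X holds 200x66 RGB frames and Y holds the two tanh-scaled targets:

model = tflearn_model()
model.fit({'input': X}, {'targets': Y}, n_epoch=20,
          validation_set=0.1, show_metric=True, run_id='nvidia_model')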
Example #6
def create2dConvNetNeuralNetworkModel(input_size, output_size, learningRate):

    # Specify the log directory
    logdir = 'log/2d/' + datetime.now().strftime('%Y%m%d-%H%M%S')

    convnet = input_data(shape=[None, input_size, input_size,1], name='input_currentState')

    tflearn.init_graph(num_cores=1, gpu_memory_fraction=0.9)

    convnet = conv_2d(convnet, nb_filter=16, filter_size=5, strides=1, padding='valid', activation='relu')
    convnet = max_pool_2d(convnet, kernel_size=2, strides=2, padding='valid')

    convnet = conv_2d(convnet, nb_filter=32, filter_size=3, strides=1, padding='valid', activation='relu')
    convnet = max_pool_2d(convnet, kernel_size=2, strides=2, padding='valid')

    convnet = flatten(convnet)

    convnet = fully_connected(convnet, n_units=256, weights_init='truncated_normal', activation='relu')
    convnet = dropout(convnet, 0.5)

    convnet = fully_connected(convnet, n_units=128, weights_init='truncated_normal', activation='relu')
    convnet = dropout(convnet, 0.5)

    convnet = fully_connected(convnet, n_units=output_size, activation='softmax')
    convnet = regression(convnet, optimizer='adam', learning_rate=learningRate, loss='categorical_crossentropy', name='targets')

    model = tflearn.DNN(convnet, tensorboard_dir=logdir)

    return model
Example #7
def classifier_forward(config,
                       incoming,
                       name=None,
                       reuse=False,
                       scope="classifier"):
    with tf.variable_scope(scope, name, reuse=reuse):
        network = incoming
        # conv_2d already applies ReLU here, so activation occurs both
        # before and after the batch normalization in these blocks
        network = relu(
            batch_normalization(
                conv_2d(network,
                        32,
                        5,
                        activation='relu',
                        regularizer="L2",
                        strides=2)))
        network = relu(
            batch_normalization(
                conv_2d(network,
                        64,
                        5,
                        activation='relu',
                        regularizer="L2",
                        strides=2)))
        network = flatten(network)

        network = relu(batch_normalization(fully_connected(network, 1024)))
        network = dropout(network, 0.5)

        network = fully_connected(network, 10)

    return network
Example #8
def add_flatten_layer(self,
                      prev_layer_name=None,
                      exclude_from_path=True,
                      **kwargs):
    prev_name = prev_layer_name or self._curr_layer_name
    prev = self._layers[prev_name]['layer']
    layer = {
        "layer": core.flatten(prev, **kwargs),
        "type": "Flatten",
        "categories": ["hidden"]
    }
    return self._add_layer(layer, exclude_from_path)
Example #9
def CNNModel(x, reuse=False):
    conv_1 = conv_2d(x, 8, [32, 1], activation='relu', regularizer="L2", scope='conv_1', reuse=reuse)
    avg_pool_1 = avg_pool_2d(conv_1, [1, 2])
    output_layer_1 = local_response_normalization(avg_pool_1)

    conv_2 = conv_2d(output_layer_1, 16, [1, 1], activation='relu', regularizer="L2", scope='conv_2', reuse=reuse)
    avg_pool_2 = avg_pool_2d(conv_2, [1, 2])
    output_layer_2 = local_response_normalization(avg_pool_2)

    output_conv = flatten(output_layer_2)

    return output_conv
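Because each conv_2d above is given a scope and a reuse flag, the same filters can be applied to two inputs, e.g. in a siamese arrangement; a usage sketch (x_left and x_right are assumed input tensors of matching shape):

features_left = CNNModel(x_left)                # builds conv_1/conv_2 weights
features_right = CNNModel(x_right, reuse=True)  # shares those weights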
Example #10
def classifier_forward(config,
                       incoming,
                       name=None,
                       reuse=False,
                       scope="classifier"):
    with tf.variable_scope(scope, name, reuse=reuse):
        network = incoming
        network = relu(conv_2d(network, 32, 5, strides=2))
        network = relu(conv_2d(network, 64, 5, strides=2))
        network = flatten(network)

        network = relu(fully_connected(network, 1024))
        network = dropout(network, 0.7)

        network = fully_connected(network, 10)

    return network
Example #11
def NiN(input_layer, num_class):
    network = conv_2d(input_layer, 192, 5, activation='relu')
    network = conv_2d(network, 160, 1, activation='relu')
    network = conv_2d(network, 96, 1, activation='relu')
    network = max_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    network = conv_2d(network, 192, 5, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = avg_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    network = conv_2d(network, 192, 3, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 10, 1, activation='relu')
    network = avg_pool_2d(network, 8)
    network = flatten(network)
    return network
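The NiN head classifies with a 1x1 convolution down to 10 channels followed by global average pooling instead of a dense layer; a sketch of wiring it into a trainable model, assuming 32x32 RGB inputs:

net = NiN(input_data(shape=[None, 32, 32, 3]), 10)
net = regression(net, optimizer='adam',
                 loss='softmax_categorical_crossentropy')
model = tflearn.DNN(net)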
Example #12
def model_lenet5(data):
    layer1_conv = conv_2d(data,
                          nb_filter=6,
                          filter_size=5,
                          strides=[1, 1, 1, 1],
                          activation='relu',
                          padding='SAME',
                          bias=True)
    layer1_pool = avg_pool_2d(layer1_conv,
                              kernel_size=2,
                              strides=2,
                              padding='SAME')

    layer2_conv = conv_2d(layer1_pool,
                          nb_filter=16,
                          filter_size=5,
                          strides=[1, 1, 1, 1],
                          activation='relu',
                          padding='VALID',
                          bias=True)
    layer2_pool = avg_pool_2d(layer2_conv,
                              kernel_size=2,
                              strides=2,
                              padding='SAME')

    flat_layer = flatten(layer2_pool)
    layer3_fccd = fully_connected(flat_layer,
                                  n_units=120,
                                  activation='relu',
                                  bias=True)

    layer4_fccd = fully_connected(layer3_fccd,
                                  n_units=84,
                                  activation='relu',
                                  bias=True)

    #w = tf.Variable(tf.truncated_normal([84, 10], stddev=0.1))
    #b = tf.Variable(tf.constant(1.0, shape = [10]))

    #logits = tf.matmul(layer4_fccd, w) + b
    logits = fully_connected(layer4_fccd,
                             n_units=10,
                             activation='relu',
                             bias=True)
    return logits
Example #13
def conv(classes, input_shape):
    model = input_data(input_shape, name="input")
    model = conv_2d(model, 32, (3, 3), activation='relu')
    model = conv_2d(model, 64, (3, 3), activation='relu')
    model = max_pool_2d(model, (2, 2))
    model = dropout(model, 0.25)
    model = flatten(model)
    model = fully_connected(model, 128, activation='relu')
    model = dropout(model, 0.5)
    model = fully_connected(model, classes, activation='softmax')
    model = regression(model,
                       optimizer='adam',
                       learning_rate=0.001,
                       loss='categorical_crossentropy',
                       name='target')
    # Training
    model = tflearn.DNN(model, tensorboard_verbose=3)
    return model
Example #14
def retrain_sound_convnet(filepath):
    data = load_sound_data()
    trainX = data[2]
    trainY = data[3]
    testX = data[4]
    testY = data[5]

    ## design the network
    ## input is an 89 x 89 nparray reshaped from the original 1 x 7921
    ## with the audio information
    input_layer = input_data(shape=[None, 89, 89, 1])
    ## convolutional layer
    conv_layer = conv_2d(input_layer,
                        nb_filter=32,
                        filter_size=2,
                        activation='sigmoid',
                        name='conv_layer_1')
    ## max pooling layer, window of 2x2
    pool_layer = max_pool_2d(conv_layer, 2, name='pool_layer_2')
    ## dropout layer
    dropout_layer = dropout(pool_layer, 0.25)
    flat_layer = flatten(dropout_layer, name="flatten_layer")
    ## fully connected layer with 256 units
    fc_layer_1 = fully_connected(flat_layer, 256, activation='relu', name='fc_layer_1')
    ## dropout layer
    dropout_layer_1 = dropout(fc_layer_1, 0.25)
    ## fully connected output layer with 3 units: 0 is bee, 1 is cricket, 2 is ambient
    fc_layer_2 = fully_connected(dropout_layer_1, 3, activation='softmax', name='fc_layer_2')
    ## network is trained with sgd, categorical cross entropy loss function, and eta = 0.01
    network = regression(fc_layer_2, optimizer='sgd',
                        loss='categorical_crossentropy', learning_rate=0.01)
    ## turn the network into a model
    model = tflearn.DNN(network)
    model.load(filepath)
    ## now do the training on the network
    NUM_EPOCHS = 150
    BATCH_SIZE = 1
    model.fit(trainX, trainY, n_epoch=NUM_EPOCHS,
                shuffle=True,
                validation_set=(testX, testY),
                show_metric=True,
                batch_size = BATCH_SIZE,
                run_id='audio_convnet')
    model.save(filepath)
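Example #15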
def Convolution_NN(input_size, arg):
    input_layer = input_data(shape=[None, input_size, input_size, arg],
                             name='input_layer')
    conv1 = conv_2d(input_layer,
                    nb_filter=filter_size_1,
                    filter_size=6,
                    strides=1,
                    activation='relu',
                    regularizer='L2')
    #conv1 = max_pool_2d(conv1,2)

    conv2 = conv_2d(conv1,
                    nb_filter=filter_size_2,
                    filter_size=5,
                    strides=2,
                    activation='relu',
                    regularizer='L2')
    #conv2 = max_pool_2d(conv2,2)
    conv3 = conv_2d(conv2,
                    nb_filter=filter_size_3,
                    filter_size=4,
                    strides=2,
                    activation='relu',
                    regularizer='L2')

    full_layer1 = fully_connected(flatten(conv3),
                                  fullyconnected_size,
                                  activation='relu',
                                  regularizer='L2')
    full_layer1 = dropout(full_layer1, 0.75)

    out_layer = fully_connected(full_layer1, 10, activation='softmax')

    sgd = tflearn.SGD(learning_rate=0.1, lr_decay=0.096, decay_step=100)

    top_k = tflearn.metrics.top_k(3)

    network = regression(out_layer,
                         optimizer=sgd,
                         metric=top_k,
                         loss='categorical_crossentropy')
    return tflearn.DNN(network,
                       tensorboard_dir='tf_CNN_board',
                       tensorboard_verbose=3)
Example #16
def Net_in_Net1(network):
    network = conv_2d(network, 192, 5, activation='relu')
    network = conv_2d(network, 160, 1, activation='relu')
    network = conv_2d(network, 96, 1, activation='relu')
    network = max_pool_2d(network, 3, strides=2)
    network = dropout(network, 1.0)  # keep_prob=1.0, i.e. dropout effectively disabled
    network = conv_2d(network, 192, 5, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = avg_pool_2d(network, 3, strides=2)
    network = dropout(network, 1.0)  # keep_prob=1.0, i.e. dropout effectively disabled
    network = conv_2d(network, 192, 3, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 10, 1, activation='relu')
    network = avg_pool_2d(network, 8)
    network = flatten(network)
    network = fully_connected(network, output_dim, activation='softmax')

    return network
Example #17
def create_net():
    """
    In this method che convolutional neural network is created.
    Return:
    ---------
    - model
    """
    tf.reset_default_graph()

    convnet = input_data(shape=[None, IMG_SIZE, IMG_SIZE, 1], name='input')

    convnet = conv_2d(convnet, 32, 5, activation='relu')
    convnet = max_pool_2d(convnet, 5)

    convnet = conv_2d(convnet, 64, 5, activation='relu')
    convnet = max_pool_2d(convnet, 5)

    convnet = conv_2d(convnet, 128, 5, activation='relu')
    convnet = max_pool_2d(convnet, 5)

    convnet = conv_2d(convnet, 64, 5, activation='relu')
    convnet = max_pool_2d(convnet, 5)

    convnet = conv_2d(convnet, 32, 5, activation='relu')
    convnet = max_pool_2d(convnet, 5)

    convnet = flatten(convnet)

    convnet = fully_connected(convnet, 1024, activation='relu')
    convnet = dropout(convnet, 0.8)

    convnet = fully_connected(convnet, 2, activation='softmax')
    convnet = regression(convnet, optimizer='adam', learning_rate=LearningRate,
                         loss='categorical_crossentropy', name='targets')

    model = tflearn.DNN(convnet, tensorboard_dir='log')
    return model
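A minimal sketch of training and inference with the model above, assuming train/validation arrays shaped [n, IMG_SIZE, IMG_SIZE, 1] and one-hot labels for the two classes:

model = create_net()
model.fit({'input': X_train}, {'targets': y_train}, n_epoch=10,
          validation_set=({'input': X_val}, {'targets': y_val}),
          show_metric=True, run_id='binary_convnet')
predictions = model.predict(X_test)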
Example #18
def create_cnn_layers():
    shape = [None, IMAGE_STD_HEIGHT, IMAGE_STD_WIDTH, RGB_COLOR_COUNT]

    # input_layer = Input(name='input', shape=shape)
    input_layer = input_data(name='input', shape=shape)
    # h = Convolution2D(22, 5, 5, activation='relu', dim_ordering=dim_ordering)(input_layer)
    h = conv_2d_specialized(input_layer, 22, [5, 5])
    POOL_SIZE = [2, 2]
    # h = MaxPooling2D(pool_size=POOL_SIZE)(h)
    h = max_pool_2d(h, POOL_SIZE, padding='valid')
    h = local_response_normalization(h)
    # h = Convolution2D(44, 3, 3, activation='relu', dim_ordering=dim_ordering)(h)
    h = conv_2d_specialized(h, 44, [3, 3])
    # h = MaxPooling2D(pool_size=POOL_SIZE)(h)
    h = max_pool_2d(h, POOL_SIZE, padding='valid')
    h = local_response_normalization(h)
    # h = Dropout(0.25)(h)
    h = dropout(h, 1-0.25)
    # last_cnn_layer = Flatten()(h)
    last_cnn_layer = flatten(h)
    return input_layer, last_cnn_layer
Example #19
def build_network_in_network(self, network):
    # network = input_data(shape=[None, 32, 32, 3])
    network = conv_2d(network, 192, 5, activation='relu')
    network = conv_2d(network, 160, 1, activation='relu')
    network = conv_2d(network, 96, 1, activation='relu')
    network = max_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    network = conv_2d(network, 192, 5, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = avg_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    network = conv_2d(network, 192, 3, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 2, 1, activation='relu')
    network = avg_pool_2d(network, 8)
    network = flatten(network)
    network = regression(network, optimizer='adam',
                         loss='softmax_categorical_crossentropy',
                         learning_rate=0.001)
    return network
Example #21
def Net_in_Net1(network, scale=None):
    # 'scale' may be a callable that rescales/normalizes the input
    if scale is not None:
        network = scale(network)

    network = conv_2d(network, 192, 5, activation='relu')
    network = conv_2d(network, 160, 1, activation='relu')
    network = conv_2d(network, 96, 1, activation='relu')
    network = max_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    network = conv_2d(network, 192, 5, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = avg_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    network = conv_2d(network, 192, 3, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 10, 1, activation='relu')
    network = avg_pool_2d(network, 8)
    network = flatten(network)
    network = fully_connected(network, output_dim, activation='sigmoid')

    return network
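Example #22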
def le_net(IMG_PRE=img_pre, IMG_AUG=img_aug):
    '''
    Takes custom image preprocessing and image augmentation pipelines.
    '''
    network = tflearn.input_data(shape=[None, 28, 28, 1],
                                 data_preprocessing=IMG_PRE,
                                 data_augmentation=IMG_AUG)

    network = conv_2d(network, 32, (3, 3), activation='relu')

    network = batch_normalization(network)
    network = conv_2d(network, 32, (3, 3), activation='relu')
    network = max_pool_2d(network, 2)

    network = batch_normalization(network)
    network = conv_2d(network, 64, (3, 3), activation='relu')

    network = batch_normalization(network)
    network = conv_2d(network, 64, (3, 3), activation='relu')
    network = max_pool_2d(network, 2)

    network = flatten(network)

    network = batch_normalization(network)
    network = fully_connected(network, 512, activation='relu')
    network = dropout(network, 0.5)

    network = batch_normalization(network)
    network = fully_connected(network, 10, activation='softmax')

    network = regression(network,
                         optimizer='adam',
                         loss='categorical_crossentropy',
                         learning_rate=0.001)
    return network
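The img_pre and img_aug defaults referenced in the signature are defined elsewhere; a minimal sketch of compatible objects built with TFLearn's preprocessing and augmentation helpers:

from tflearn.data_preprocessing import ImagePreprocessing
from tflearn.data_augmentation import ImageAugmentation

img_pre = ImagePreprocessing()
img_pre.add_featurewise_zero_center()  # subtract the mean image value
img_pre.add_featurewise_stdnorm()      # divide by the standard deviation

img_aug = ImageAugmentation()
img_aug.add_random_flip_leftright()
img_aug.add_random_rotation(max_angle=10.0)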
Example #23
# (excerpt from an Inception-ResNet-v2 script; tower_conv0_1 is defined earlier)
tower_conv1 = relu(batch_normalization(
    conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
tower_conv1_1 = relu(batch_normalization(
    conv_2d(tower_conv1, 288, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))

tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
tower_conv2_1 = relu(
    batch_normalization(conv_2d(tower_conv2, 288, 3, bias=False, name='Conv2d_0b_3x3', activation=None)))
tower_conv2_2 = relu(batch_normalization(
    conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))

tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool], mode='concat', axis=3)

net = repeat(net, 9, block8, scale=0.2)
net = block8(net, activation=None)

net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2, padding='VALID', name='AvgPool_1a_8x8')
net = flatten(net)
net = dropout(net, dropout_keep_prob)
loss = fully_connected(net, num_classes, activation='softmax')

network = tflearn.regression(loss, optimizer='RMSprop',
                             loss='categorical_crossentropy',
                             learning_rate=0.0001)
model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2',
                    max_checkpoints=1, tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")
model.fit(X, Y, n_epoch=1000, validation_set=0.1, shuffle=True,
          show_metric=True, batch_size=32, snapshot_step=2000,
          snapshot_epoch=False, run_id='inception_resnet_v2_17flowers')
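This excerpt relies on helpers from the full Inception-ResNet-v2 script: block8 and its siblings are defined in Examples #26 and #28, and repeat applies a block several times. A minimal sketch of repeat consistent with its call sites here:

def repeat(incoming, n, block_fn, **kwargs):
    # apply the same residual block n times in sequence
    net = incoming
    for _ in range(n):
        net = block_fn(net, **kwargs)
    return net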
Example #24
def network(img_shape, name, LR):

    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()
    # Real-time data augmentation
    img_aug = ImageAugmentation()
    img_aug.add_random_blur(sigma_max=3.0)
    img_aug.add_random_flip_leftright()
    img_aug.add_random_flip_updown()
    img_aug.add_random_90degrees_rotation(rotations=[0, 2])

    network = input_data(shape=img_shape, name=name, data_preprocessing=img_prep, data_augmentation=img_aug)
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID',activation=None,name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID',activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID',activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID',activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')

    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))

    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))

    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None,name='Conv2d_5b_b2_0c_3x3')))

    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None,name='Conv2d_5b_b3_0b_1x1')))

    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)

    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2,activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID',name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)

    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    # tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 1, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_0a_1x1')))

    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None,name='Conv2d_0a_1x1')))
    # tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1,288,3, bias=False, strides=2, padding='VALID',activation=None, name='COnv2d_1a_3x3')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))

    tower_conv2 = relu(batch_normalization(conv_2d(net, 256,1, bias=False, activation=None,name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288,3, bias=False, name='Conv2d_0b_3x3',activation=None)))
    # tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID',activation=None, name='Conv2d_1a_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 1, bias=False, strides=2, padding='VALID',activation=None, name='Conv2d_1a_3x3')))

    # tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv0_1, tower_conv1_1,tower_conv2_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3],strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes,activation='softmax')


    network = tflearn.regression(loss, optimizer='RMSprop',
                         loss='categorical_crossentropy',
                         learning_rate=0.0001, name='targets')
    return network
Example #25
def build_net(network,
              X,
              Y,
              num_classes,
              num_epochs,
              checkpoint_path,
              size_batch,
              Xval=None,
              Yval=None,
              dec_step=100,
              train=True):
    tn = tflearn.initializations.truncated_normal(seed=100)
    xav = tflearn.initializations.xavier(seed=100)
    nor = tflearn.initializations.normal(seed=100)

    network = conv_2d(network, 192, 5, weights_init=nor, regularizer='L2')
    network = batch_normalization(network)
    network = tflearn.activations.softplus(network)

    network = conv_2d(network, 160, 1, weights_init=nor, regularizer='L2')
    network = batch_normalization(network)
    network = tflearn.activations.softplus(network)

    network = conv_2d(network, 96, 1, weights_init=nor, regularizer='L2')
    network = batch_normalization(network)
    network = tflearn.activations.softplus(network)
    network = max_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)

    network = conv_2d(network, 192, 5, weights_init=nor, regularizer='L2')
    network = batch_normalization(network)
    network = tflearn.activations.softplus(network)

    network = conv_2d(network, 192, 1, weights_init=nor, regularizer='L2')
    network = batch_normalization(network)
    network = tflearn.activations.softplus(network)

    network = conv_2d(network, 192, 1, weights_init=nor, regularizer='L2')
    network = batch_normalization(network)
    network = tflearn.activations.softplus(network)
    network = avg_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)

    network = conv_2d(network, 192, 3, weights_init=nor, regularizer='L2')
    network = batch_normalization(network)
    network = tflearn.activations.softplus(network)

    network = conv_2d(network, 192, 1, weights_init=nor, regularizer='L2')
    network = batch_normalization(network)
    network = tflearn.activations.softplus(network)

    network = conv_2d(network,
                      num_classes,
                      1,
                      weights_init=nor,
                      regularizer='L2')
    network = batch_normalization(network)
    network = tflearn.activations.softplus(network)

    network = avg_pool_2d(network, 9)
    network = flatten(network)

    adadelta = tflearn.optimizers.AdaDelta(learning_rate=0.01,
                                           rho=0.95,
                                           epsilon=1e-08)

    network = regression(network,
                         optimizer=adadelta,
                         loss='softmax_categorical_crossentropy')

    # Train
    model = tflearn.DNN(network,
                        tensorboard_verbose=0,
                        checkpoint_path=checkpoint_path)
    if train:
        start_time = time.time()
        if Xval is None or Yval is None:
            model.fit(X,
                      Y,
                      n_epoch=num_epochs,
                      validation_set=0.0,
                      show_metric=True,
                      run_id='hsi_cnn_model',
                      shuffle=True,
                      batch_size=size_batch)
        else:
            model.fit(X,
                      Y,
                      n_epoch=num_epochs,
                      validation_set=(Xval, Yval),
                      show_metric=True,
                      run_id='hsi_cnn_model',
                      shuffle=True,
                      batch_size=size_batch)

        print("\n\n-------------train time: %s seconds\n\n" %
              (time.time() - start_time))

    return model
Example #26
def _model5():
    global yTest, img_aug
    tf.reset_default_graph()
    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()

    def block35(net, scale=1.0, activation="relu"):
        tower_conv = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None,name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 32, 3, bias=False, activation=None,name='Conv2d_0b_3x3')))
        tower_conv2_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 48,3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 64,3, bias=False, activation=None, name='Conv2d_0c_3x3')))
        tower_mixed = merge([tower_conv, tower_conv1_1, tower_conv2_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block17(net, scale=1.0, activation="relu"):
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv_1_0 = relu(batch_normalization(conv_2d(net, 128, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv_1_1 = relu(batch_normalization(conv_2d(tower_conv_1_0, 160,[1,7], bias=False, activation=None,name='Conv2d_0b_1x7')))
        tower_conv_1_2 = relu(batch_normalization(conv_2d(tower_conv_1_1, 192, [7,1], bias=False, activation=None,name='Conv2d_0c_7x1')))
        tower_mixed = merge([tower_conv,tower_conv_1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net


    def block8(net, scale=1.0, activation="relu"):
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 224, [1,3], bias=False, activation=None, name='Conv2d_0b_1x3')))
        tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 256, [3,1], bias=False, name='Conv2d_0c_3x1')))
        tower_mixed = merge([tower_conv,tower_conv1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net


    num_classes = len(Y[0])
    dropout_keep_prob = 0.8

    network = input_data(shape=[None, inputSize, inputSize, dim],
             name='input',
             data_preprocessing=img_prep,
             data_augmentation=img_aug)
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID',activation=None,name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID',activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID',activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID',activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')

    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))

    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))

    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None,name='Conv2d_5b_b2_0c_3x3')))

    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None,name='Conv2d_5b_b3_0b_1x1')))

    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)
    '''
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2,activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID',name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)

    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_0a_1x1')))

    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None,name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1,288,3, bias=False, strides=2, padding='VALID',activation=None, name='COnv2d_1a_3x3')))

    tower_conv2 = relu(batch_normalization(conv_2d(net, 256,1, bias=False, activation=None,name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288,3, bias=False, name='Conv2d_0b_3x3',activation=None)))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID',activation=None, name='Conv2d_1a_3x3')))
    
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    '''
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 1, bias=False, strides=2,activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 1, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID',name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)

    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 1, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_0a_1x1')))

    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None,name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))

    tower_conv2 = relu(batch_normalization(conv_2d(net, 256,1, bias=False, activation=None,name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288,1, bias=False, name='Conv2d_0b_3x3',activation=None)))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 1, bias=False, strides=2, padding='VALID',activation=None, name='Conv2d_1a_3x3')))
    
    
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    
    ####
    net = merge([tower_conv0_1, tower_conv1_1,tower_conv2_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3],strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes,activation='softmax')


    network = tflearn.regression(loss, optimizer='RMSprop',
                         loss='categorical_crossentropy',
                         learning_rate=0.0001)
    model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2',
                        max_checkpoints=1, tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")

    model.fit(X, Y, n_epoch=epochNum, validation_set=(xTest, yTest), shuffle=True,
              show_metric=True, batch_size=batchNum, snapshot_step=2000,
              snapshot_epoch=False, run_id='inception_resnet_v2_oxflowers17')

    if modelStore: model.save(_id + '-model.tflearn')
Example #27
tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_0a_1x1')))

tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None,name='Conv2d_0a_1x1')))
tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))

tower_conv2 = relu(batch_normalization(conv_2d(net, 256,1, bias=False, activation=None,name='Conv2d_0a_1x1')))
tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288,3, bias=False, name='Conv2d_0b_3x3',activation=None)))
tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID',activation=None, name='Conv2d_1a_3x3')))

tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
net = merge([tower_conv0_1, tower_conv1_1,tower_conv2_2, tower_pool], mode='concat', axis=3)

net = repeat(net, 9, block8, scale=0.2)
net = block8(net, activation=None)

net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
net = avg_pool_2d(net, net.get_shape().as_list()[1:3],strides=2, padding='VALID', name='AvgPool_1a_8x8')
net = flatten(net)
net = dropout(net, dropout_keep_prob)
loss = fully_connected(net, num_classes,activation='softmax')


network = tflearn.regression(loss, optimizer='RMSprop',
                     loss='categorical_crossentropy',
                     learning_rate=0.0001)
model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2',
                    max_checkpoints=1, tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")
model.fit(X, Y, n_epoch=1000, validation_set=0.1, shuffle=True,
          show_metric=True, batch_size=32, snapshot_step=2000,
          snapshot_epoch=False, run_id='inception_resnet_v2_17flowers')
Example #28
def _model5():
    global yTest, img_aug
    tf.reset_default_graph()
    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()

    def block35(net, scale=1.0, activation="relu"):
        tower_conv = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None,name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 32, 3, bias=False, activation=None,name='Conv2d_0b_3x3')))
        tower_conv2_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 48,3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 64,3, bias=False, activation=None, name='Conv2d_0c_3x3')))
        tower_mixed = merge([tower_conv, tower_conv1_1, tower_conv2_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block17(net, scale=1.0, activation="relu"):
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv_1_0 = relu(batch_normalization(conv_2d(net, 128, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv_1_1 = relu(batch_normalization(conv_2d(tower_conv_1_0, 160,[1,7], bias=False, activation=None,name='Conv2d_0b_1x7')))
        tower_conv_1_2 = relu(batch_normalization(conv_2d(tower_conv_1_1, 192, [7,1], bias=False, activation=None,name='Conv2d_0c_7x1')))
        tower_mixed = merge([tower_conv,tower_conv_1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net


    def block8(net, scale=1.0, activation="relu"):
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 224, [1,3], bias=False, activation=None, name='Conv2d_0b_1x3')))
        tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 256, [3,1], bias=False, name='Conv2d_0c_3x1')))
        tower_mixed = merge([tower_conv,tower_conv1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net


    num_classes = len(yTest[0])
    dropout_keep_prob = 0.8

    network = input_data(shape=[None, inputSize, inputSize, dim],
             name='input',
             data_preprocessing=img_prep,
             data_augmentation=img_aug)
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID',activation=None,name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID',activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID',activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID',activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')

    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))

    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))

    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None,name='Conv2d_5b_b2_0c_3x3')))

    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None,name='Conv2d_5b_b3_0b_1x1')))

    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)

    '''
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2,activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID',name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)

    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_0a_1x1')))

    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None,name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1,288,3, bias=False, strides=2, padding='VALID',activation=None, name='COnv2d_1a_3x3')))

    tower_conv2 = relu(batch_normalization(conv_2d(net, 256,1, bias=False, activation=None,name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288,3, bias=False, name='Conv2d_0b_3x3',activation=None)))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID',activation=None, name='Conv2d_1a_3x3')))
    
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    '''
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 1, bias=False, strides=2,activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 1, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID',name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)

    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 1, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_0a_1x1')))

    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None,name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1,288,1, bias=False, strides=2, padding='VALID',activation=None, name='Conv2d_1a_3x3')))

    tower_conv2 = relu(batch_normalization(conv_2d(net, 256,1, bias=False, activation=None,name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288,1, bias=False, name='Conv2d_0b_3x3',activation=None)))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 1, bias=False, strides=2, padding='VALID',activation=None, name='Conv2d_1a_3x3')))
    
    
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    
    net = merge([tower_conv0_1, tower_conv1_1,tower_conv2_2, tower_pool], mode='concat', axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
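    # Pooling kernel spans the feature map's full height and width
    # (get_shape()[1:3]): this is global average pooling.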
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3],strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes, activation='softmax')


    network = tflearn.regression(loss, optimizer='rmsprop',
                         loss='categorical_crossentropy',
                         learning_rate=0.0001)
    model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2',
                        max_checkpoints=1, tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")

    model.load(_path)
    pred = model.predict(xTest)

    df = pd.DataFrame(pred)
    df.to_csv(_path + ".csv")

    newList = pred.copy()
    newList = convert2(newList)
    if _CSV: makeCSV(newList)
    pred = convert2(pred)
    pred = convert3(pred)
    yTest = convert3(yTest)
    print(metrics.confusion_matrix(yTest, pred))
    print(metrics.classification_report(yTest, pred))
    print('Accuracy', accuracy_score(yTest, pred))
    print()
    if _wrFile: writeTest(pred)
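Note: repeat is not a TFLearn built-in; in these Inception-ResNet-v2 snippets it is a small module-level helper that applies a residual block N times. A minimal sketch consistent with the calls above, e.g. repeat(net, 10, block35, scale=0.17):

def repeat(inputs, repetitions, layer, *args, **kwargs):
    # Apply `layer` (block35 / block17 / block8) `repetitions` times in sequence.
    outputs = inputs
    for _ in range(repetitions):
        outputs = layer(outputs, *args, **kwargs)
    return outputs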
Exemplo n.º 29
0
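For this snippet to run stand-alone it needs the usual TFLearn imports; a minimal set, assuming the standard TFLearn CIFAR-10 example layout:

import tflearn
from tflearn.datasets import cifar10
from tflearn.data_utils import shuffle, to_categorical
from tflearn.layers.core import input_data, dropout, flatten
from tflearn.layers.conv import conv_2d, max_pool_2d, avg_pool_2d
from tflearn.layers.estimator import regression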
(X, Y), (X_test, Y_test) = cifar10.load_data()
X, Y = shuffle(X, Y)
Y = to_categorical(Y, 10)
Y_test = to_categorical(Y_test, 10)

# Building 'Network In Network'
network = input_data(shape=[None, 32, 32, 3])
network = conv_2d(network, 192, 5, activation='relu')
network = conv_2d(network, 160, 1, activation='relu')
network = conv_2d(network, 96, 1, activation='relu')
network = max_pool_2d(network, 3, strides=2)
network = dropout(network, 0.5)
network = conv_2d(network, 192, 5, activation='relu')
network = conv_2d(network, 192, 1, activation='relu')
network = conv_2d(network, 192, 1, activation='relu')
network = avg_pool_2d(network, 3, strides=2)
network = dropout(network, 0.5)
network = conv_2d(network, 192, 3, activation='relu')
network = conv_2d(network, 192, 1, activation='relu')
network = conv_2d(network, 10, 1, activation='relu')
network = avg_pool_2d(network, 8)
network = flatten(network)
network = regression(network, optimizer='adam',
                     loss='categorical_crossentropy',
                     learning_rate=0.001)

# Training
model = tflearn.DNN(network)
model.fit(X, Y, n_epoch=50, shuffle=True, validation_set=(X_test, Y_test),
          show_metric=True, batch_size=128, run_id='cifar10_net_in_net')
Exemplo n.º 30
0
  # Placeholder hyperparameters: the opening of this dict was truncated in the
  # excerpt, so keys are inferred from usage below and values are assumptions.
  params = {
      'conv_filter': 3,
      'pool_width': 2,
      'pool_stride': 2,
      'id': 'model_a',
  }
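
  # img_aug is referenced below but not defined in this excerpt; a minimal
  # setup (assuming TFLearn's ImageAugmentation class) might be:
  img_aug = ImageAugmentation()
  img_aug.add_random_flip_leftright()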
  # Build CNN
  net_in = input_data(shape=[None, 32, 32, 3], data_augmentation=img_aug)
  conv1 = conv_2d(net_in, 64, params['conv_filter'], activation='relu', regularizer='L2')
  pool1 = max_pool_2d(conv1, params['pool_width'], params['pool_stride'])
  lrn1 = local_response_normalization(pool1)

  conv2 = conv_2d(lrn1, 64, params['conv_filter'], activation='relu', regularizer='L2')
  pool2 = max_pool_2d(conv2, params['pool_width'], params['pool_stride'])
  lrn2 = local_response_normalization(pool2)

  conv3 = conv_2d(lrn2, 128, params['conv_filter'], activation='relu', regularizer='L2')
  pool3 = max_pool_2d(conv3, params['pool_width'], params['pool_stride'])
  lrn3 = local_response_normalization(pool3)

  flat = flatten(lrn3) 

  fully1 = fully_connected(flat, 384, activation='relu')
  drop1 = dropout(fully1, 0.5)
  fully2 = fully_connected(drop1, 192, activation='relu')
  drop2 = dropout(fully2, 0.5)
  fully3 = fully_connected(drop2, 10, activation='softmax')
  network = regression(fully3, optimizer='adam',
                       loss='categorical_crossentropy',
                       learning_rate=0.001, name='Target')

  # Define model
  model = tflearn.DNN(network, tensorboard_verbose=0, tensorboard_dir='../log/')

  # Predict
  model.load('../log/model_4/'+params['id']+'.tflearn')
Exemplo n.º 31
0
conv_1 = relu(conv_2d(network, 64, 7, strides=2, bias=True, padding=padding, activation=None, name='Conv2d_1'))
maxpool_1 = batch_normalization(max_pool_2d(conv_1, 3, strides=2, padding=padding, name='MaxPool_1'))
#LRN_1 = local_response_normalization(maxpool_1, name='LRN_1')
# FeatEX-1
conv_2a = relu(conv_2d(maxpool_1, 96, 1, strides=1, padding=padding, name='Conv_2a_FX1'))
maxpool_2a = max_pool_2d(maxpool_1, 3, strides=1, padding=padding, name='MaxPool_2a_FX1')
conv_2b = relu(conv_2d(conv_2a, 208, 3, strides=1, padding=padding, name='Conv_2b_FX1'))
conv_2c = relu(conv_2d(maxpool_2a, 64, 1, strides=1, padding=padding, name='Conv_2c_FX1'))
FX1_out = merge([conv_2b, conv_2c], mode='concat', axis=3, name='FX1_out')
# FeatEX-2
conv_3a = relu(conv_2d(FX1_out, 96, 1, strides=1, padding=padding, name='Conv_3a_FX2'))
maxpool_3a = max_pool_2d(FX1_out, 3, strides=1, padding=padding, name='MaxPool_3a_FX2')
conv_3b = relu(conv_2d(conv_3a, 208, 3, strides=1, padding=padding, name='Conv_3b_FX2'))
conv_3c = relu(conv_2d(maxpool_3a, 64, 1, strides=1, padding=padding, name='Conv_3c_FX2'))
FX2_out = merge([conv_3b, conv_3c], mode='concat', axis=3, name='FX2_out')
net = flatten(FX2_out)
if do:
    net = dropout(net, dropout_keep_prob)
loss = fully_connected(net, num_classes,activation='softmax')

# Compile the model and define the hyperparameters
network = tflearn.regression(loss, optimizer='Adam',
                     loss='categorical_crossentropy',
                     learning_rate=LR)

# Final definition of model checkpoints and other configurations
model = tflearn.DNN(network, checkpoint_path='../DeXpression/DeXpression_checkpoints',
                    max_checkpoints=1, tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")

# Save the model
#model.save('../DeXpression/DeXpression_checkpoints/' + RUNID + '.model')
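This excerpt begins mid-script: network, padding, do, dropout_keep_prob, num_classes, and LR are defined earlier in the original file. A plausible reconstruction of that missing preamble, where every value is an assumption chosen to fit the DeXpression setting:

padding = 'same'
do = True                    # whether to apply dropout before the classifier
dropout_keep_prob = 0.5
num_classes = 7              # facial-expression classes
LR = 0.001
network = input_data(shape=[None, 224, 224, 1], name='input')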
Exemplo n.º 32
0
network = input_data(shape=[None, 32, 32, 3])
network = conv_2d(network, 192, 5, activation='relu')
network = conv_2d(network, 160, 1, activation='relu')
network = conv_2d(network, 96, 1, activation='relu')
network = max_pool_2d(network, 3, strides=2)
network = dropout(network, 0.5)
network = conv_2d(network, 192, 5, activation='relu')
network = conv_2d(network, 192, 1, activation='relu')
network = conv_2d(network, 192, 1, activation='relu')
network = avg_pool_2d(network, 3, strides=2)
network = dropout(network, 0.5)
network = conv_2d(network, 192, 3, activation='relu')
network = conv_2d(network, 192, 1, activation='relu')
network = conv_2d(network, 10, 1, activation='relu')
network = avg_pool_2d(network, 8)
network = flatten(network)
network = regression(network,
                     optimizer='adam',
                     loss='softmax_categorical_crossentropy',
                     learning_rate=0.001)

# training
model = tflearn.DNN(network)
model.fit(X,
          Y,
          n_epoch=50,
          shuffle=True,
          validation_set=(X_test, Y_test),
          show_metric=True,
          batch_size=128,
          run_id='cifar10_net_in_net')
Exemplo n.º 33
0
def inception_v2(width, height, learning_rate):
    num_classes = 17
    dropout_keep_prob = 0.8

    network = input_data(shape=[None, width, height, 3])
    conv1a_3_3 = relu(
        batch_normalization(
            conv_2d(network,
                    32,
                    3,
                    strides=2,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(
        batch_normalization(
            conv_2d(conv1a_3_3,
                    32,
                    3,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(
        batch_normalization(
            conv_2d(conv2a_3_3,
                    64,
                    3,
                    bias=False,
                    activation=None,
                    name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3,
                                3,
                                strides=2,
                                padding='VALID',
                                name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(
        batch_normalization(
            conv_2d(maxpool3a_3_3,
                    80,
                    1,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(
        batch_normalization(
            conv_2d(conv3b_1_1,
                    192,
                    3,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3,
                                3,
                                strides=2,
                                padding='VALID',
                                name='MaxPool_5a_3x3')

    tower_conv = relu(
        batch_normalization(
            conv_2d(maxpool5a_3_3,
                    96,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b0_1x1')))

    tower_conv1_0 = relu(
        batch_normalization(
            conv_2d(maxpool5a_3_3,
                    48,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv1_0,
                    64,
                    5,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b1_0b_5x5')))

    tower_conv2_0 = relu(
        batch_normalization(
            conv_2d(maxpool5a_3_3,
                    64,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(
        batch_normalization(
            conv_2d(tower_conv2_0,
                    96,
                    3,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(
        batch_normalization(
            conv_2d(tower_conv2_1,
                    96,
                    3,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b2_0c_3x3')))

    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3,
                                3,
                                strides=1,
                                padding='same',
                                name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(
        batch_normalization(
            conv_2d(tower_pool3_0,
                    64,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b3_0b_1x1')))

    tower_5b_out = merge(
        [tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1],
        mode='concat',
        axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)

    tower_conv = relu(
        batch_normalization(
            conv_2d(net,
                    384,
                    3,
                    bias=False,
                    strides=2,
                    activation=None,
                    padding='VALID',
                    name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(
        batch_normalization(
            conv_2d(net,
                    256,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv1_0,
                    256,
                    3,
                    bias=False,
                    activation=None,
                    name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(
        batch_normalization(
            conv_2d(tower_conv1_1,
                    384,
                    3,
                    bias=False,
                    strides=2,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net,
                             3,
                             strides=2,
                             padding='VALID',
                             name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)

    tower_conv = relu(
        batch_normalization(
            conv_2d(net,
                    256,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(
        batch_normalization(
            conv_2d(tower_conv,
                    384,
                    3,
                    bias=False,
                    strides=2,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_0a_1x1')))

    tower_conv1 = relu(
        batch_normalization(
            conv_2d(net,
                    256,
                    1,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv1,
                    288,
                    3,
                    bias=False,
                    strides=2,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_1a_3x3')))

    tower_conv2 = relu(
        batch_normalization(
            conv_2d(net,
                    256,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(
        batch_normalization(
            conv_2d(tower_conv2,
                    288,
                    3,
                    bias=False,
                    name='Conv2d_0b_3x3',
                    activation=None)))
    tower_conv2_2 = relu(
        batch_normalization(
            conv_2d(tower_conv2_1,
                    320,
                    3,
                    bias=False,
                    strides=2,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_1a_3x3')))

    tower_pool = max_pool_2d(net,
                             3,
                             strides=2,
                             padding='VALID',
                             name='MaxPool_1a_3x3')
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool],
                mode='concat',
                axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    net = relu(
        batch_normalization(
            conv_2d(net,
                    1536,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_7b_1x1')))
    net = avg_pool_2d(net,
                      net.get_shape().as_list()[1:3],
                      strides=2,
                      padding='VALID',
                      name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes, activation='softmax')

    network = tflearn.regression(loss,
                                 optimizer='rmsprop',
                                 loss='categorical_crossentropy',
                                 learning_rate=learning_rate)
    #learning_rate=0.0001)
    model = tflearn.DNN(network,
                        checkpoint_path='inception_resnet_v2',
                        max_checkpoints=1,
                        tensorboard_verbose=2,
                        tensorboard_dir="./tflearn_logs/")
    return model
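A usage sketch for inception_v2; the block35/block17/block8 residual blocks and the repeat helper are assumed to be defined elsewhere in the file, and X/Y stand in for a training set (num_classes = 17 suggests the 17-category Oxford Flowers data):

model = inception_v2(299, 299, learning_rate=0.0001)
model.fit(X, Y, n_epoch=10, validation_set=0.1, shuffle=True,
          show_metric=True, batch_size=32, run_id='inception_resnet_v2')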
Exemplo n.º 34
0
    def imageFetch(self):
        image_path = self.path
        IMG_SIZE1 = 175
        IMG_SIZE2 = 150
        test_data = []
        image = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
        test_image = cv2.resize(image, (IMG_SIZE1, IMG_SIZE2))
        test_data.append([np.array(test_image)])

        LR = 0.0001
        MODEL_NAME = 'ECR-{}-{}.model'.format(LR, '2conv-basic')
        convnet = input_data(shape=[None, IMG_SIZE1, IMG_SIZE2, 1],
                             name='input')

        convnet = conv_2d(convnet, 32, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 32, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 32, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 64, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = dropout(convnet, 0.3)
        convnet = conv_2d(convnet, 64, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = dropout(convnet, 0.3)
        convnet = conv_2d(convnet, 64, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = dropout(convnet, 0.3)
        convnet = conv_2d(convnet, 128, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 128, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 128, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)

        convnet = flatten(convnet)

        convnet = fully_connected(convnet, 256, activation='relu')
        convnet = dropout(convnet, 0.3)
        convnet = fully_connected(convnet, 512, activation='relu')
        convnet = dropout(convnet, 0.3)
        convnet = fully_connected(convnet, 1024, activation='relu')

        convnet = fully_connected(convnet, 2, activation='softmax')
        convnet = regression(convnet,
                             optimizer='adam',
                             learning_rate=LR,
                             loss='binary_crossentropy',
                             name='targets')

        model = tflearn.DNN(convnet, tensorboard_dir='log')

        if os.path.exists('{}.meta'.format(MODEL_NAME)):
            model.load(MODEL_NAME)
            print('Explicit Content Sensor Loaded!')

        for num, data in enumerate(test_data[:]):
            test_img = data[0]
            test_img_reshaped = test_img.reshape(IMG_SIZE1, IMG_SIZE2, 1)
            model_out = model.predict([test_img_reshaped])[0]
            if np.argmax(model_out) == 1:
                str_label = 1
                # Non-Explicit Content
            else:
                str_label = 0
                # Explicit Content
            final_image = cv2.imread(image_path)
            cv2.namedWindow('Image Viewer', cv2.WINDOW_NORMAL)
            if str_label == 0:
                final_image = cv2.blur(final_image, (200, 200))
            cv2.imshow("Image Viewer", final_image)
            k = cv2.waitKey(0) & 0xFF
            if k == 27:
                cv2.destroyAllWindows()
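A hypothetical driver for the method above; the enclosing class is not shown in this excerpt, so the class name and constructor are assumptions (the method only needs the image path stored on self.path):

scanner = ExplicitContentScanner('/path/to/image.jpg')  # hypothetical class
scanner.imageFetch()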
Exemplo n.º 35
0
def ImageNetInceptionV2(outnode,
                        model_name,
                        target,
                        opt,
                        learn_r,
                        epch,
                        dropout_keep_rate,
                        save_model=False):
    def block35(net, scale=1.0, activation="relu"):
        tower_conv = relu(
            batch_normalization(
                conv_2d(net,
                        32,
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_1x1')))
        tower_conv1_0 = relu(
            batch_normalization(
                conv_2d(net,
                        32,
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(
            batch_normalization(
                conv_2d(tower_conv1_0,
                        32,
                        3,
                        bias=False,
                        activation=None,
                        name='Conv2d_0b_3x3')))
        tower_conv2_0 = relu(
            batch_normalization(
                conv_2d(net,
                        32,
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_0a_1x1')))
        tower_conv2_1 = relu(
            batch_normalization(
                conv_2d(tower_conv2_0,
                        48,
                        3,
                        bias=False,
                        activation=None,
                        name='Conv2d_0b_3x3')))
        tower_conv2_2 = relu(
            batch_normalization(
                conv_2d(tower_conv2_1,
                        64,
                        3,
                        bias=False,
                        activation=None,
                        name='Conv2d_0c_3x3')))
        tower_mixed = merge([tower_conv, tower_conv1_1, tower_conv2_2],
                            mode='concat',
                            axis=3)
        tower_out = relu(
            batch_normalization(
                conv_2d(tower_mixed,
                        net.get_shape()[3],
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block17(net, scale=1.0, activation="relu"):
        tower_conv = relu(
            batch_normalization(
                conv_2d(net,
                        192,
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_1x1')))
        tower_conv_1_0 = relu(
            batch_normalization(
                conv_2d(net,
                        128,
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_0a_1x1')))
        tower_conv_1_1 = relu(
            batch_normalization(
                conv_2d(tower_conv_1_0,
                        160, [1, 7],
                        bias=False,
                        activation=None,
                        name='Conv2d_0b_1x7')))
        tower_conv_1_2 = relu(
            batch_normalization(
                conv_2d(tower_conv_1_1,
                        192, [7, 1],
                        bias=False,
                        activation=None,
                        name='Conv2d_0c_7x1')))
        tower_mixed = merge([tower_conv, tower_conv_1_2],
                            mode='concat',
                            axis=3)
        tower_out = relu(
            batch_normalization(
                conv_2d(tower_mixed,
                        net.get_shape()[3],
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block8(net, scale=1.0, activation="relu"):
        tower_conv = relu(
            batch_normalization(
                conv_2d(net,
                        192,
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_1x1')))
        tower_conv1_0 = relu(
            batch_normalization(
                conv_2d(net,
                        192,
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(
            batch_normalization(
                conv_2d(tower_conv1_0,
                        224, [1, 3],
                        bias=False,
                        activation=None,
                        name='Conv2d_0b_1x3')))
        tower_conv1_2 = relu(
            batch_normalization(
                conv_2d(tower_conv1_1,
                        256, [3, 1],
                        bias=False,
                        activation=None,
                        name='Conv2d_0c_3x1')))
        tower_mixed = merge([tower_conv, tower_conv1_2], mode='concat', axis=3)
        tower_out = relu(
            batch_normalization(
                conv_2d(tower_mixed,
                        net.get_shape()[3],
                        1,
                        bias=False,
                        activation=None,
                        name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    # default = 0.8
    dropout_keep_prob = dropout_keep_rate

    network = input_data(shape=[None, IMG_SIZE, IMG_SIZE, 1], name='input')
    conv1a_3_3 = relu(
        batch_normalization(
            conv_2d(network,
                    32,
                    3,
                    strides=2,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(
        batch_normalization(
            conv_2d(conv1a_3_3,
                    32,
                    3,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(
        batch_normalization(
            conv_2d(conv2a_3_3,
                    64,
                    3,
                    bias=False,
                    activation=None,
                    name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3,
                                3,
                                strides=2,
                                padding='VALID',
                                name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(
        batch_normalization(
            conv_2d(maxpool3a_3_3,
                    80,
                    1,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(
        batch_normalization(
            conv_2d(conv3b_1_1,
                    192,
                    3,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3,
                                3,
                                strides=2,
                                padding='VALID',
                                name='MaxPool_5a_3x3')

    tower_conv = relu(
        batch_normalization(
            conv_2d(maxpool5a_3_3,
                    96,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b0_1x1')))

    tower_conv1_0 = relu(
        batch_normalization(
            conv_2d(maxpool5a_3_3,
                    48,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv1_0,
                    64,
                    5,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b1_0b_5x5')))

    tower_conv2_0 = relu(
        batch_normalization(
            conv_2d(maxpool5a_3_3,
                    64,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(
        batch_normalization(
            conv_2d(tower_conv2_0,
                    96,
                    3,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(
        batch_normalization(
            conv_2d(tower_conv2_1,
                    96,
                    3,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b2_0c_3x3')))

    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3,
                                3,
                                strides=1,
                                padding='same',
                                name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(
        batch_normalization(
            conv_2d(tower_pool3_0,
                    64,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_5b_b3_0b_1x1')))

    tower_5b_out = merge(
        [tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1],
        mode='concat',
        axis=3)

    net = repeat(tower_5b_out, 10, block35, scale=0.17)

    tower_conv = relu(
        batch_normalization(
            conv_2d(net,
                    384,
                    3,
                    bias=False,
                    strides=2,
                    activation=None,
                    padding='VALID',
                    name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(
        batch_normalization(
            conv_2d(net,
                    256,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv1_0,
                    256,
                    3,
                    bias=False,
                    activation=None,
                    name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(
        batch_normalization(
            conv_2d(tower_conv1_1,
                    384,
                    3,
                    bias=False,
                    strides=2,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net,
                             3,
                             strides=2,
                             padding='VALID',
                             name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)

    tower_conv = relu(
        batch_normalization(
            conv_2d(net,
                    256,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(
        batch_normalization(
            conv_2d(tower_conv,
                    384,
                    3,
                    bias=False,
                    strides=2,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_0a_1x1')))

    tower_conv1 = relu(
        batch_normalization(
            conv_2d(net,
                    256,
                    1,
                    bias=False,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv1,
                    288,
                    3,
                    bias=False,
                    strides=2,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_1a_3x3')))

    tower_conv2 = relu(
        batch_normalization(
            conv_2d(net,
                    256,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(
        batch_normalization(
            conv_2d(tower_conv2,
                    288,
                    3,
                    bias=False,
                    name='Conv2d_0b_3x3',
                    activation=None)))
    tower_conv2_2 = relu(
        batch_normalization(
            conv_2d(tower_conv2_1,
                    320,
                    3,
                    bias=False,
                    strides=2,
                    padding='VALID',
                    activation=None,
                    name='Conv2d_1a_3x3')))

    tower_pool = max_pool_2d(net,
                             3,
                             strides=2,
                             padding='VALID',
                             name='MaxPool_1a_3x3')
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool],
                mode='concat',
                axis=3)

    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)

    net = relu(
        batch_normalization(
            conv_2d(net,
                    1536,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_7b_1x1')))
    net = avg_pool_2d(net,
                      net.get_shape().as_list()[1:3],
                      strides=2,
                      padding='VALID',
                      name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, outnode, activation='softmax')

    str_model_name = "{}_{}_{}_{}_{}_{}".format(model_name, target, opt,
                                                learn_r, epch,
                                                dropout_keep_rate)

    network = tflearn.regression(loss,
                                 optimizer=opt,
                                 loss='categorical_crossentropy',
                                 learning_rate=learn_r,
                                 name='targets')
    model = None
    if save_model:
        model = tflearn.DNN(
            network,
            checkpoint_path='../tflearnModels/{}'.format(str_model_name),
            best_checkpoint_path='../tflearnModels/bestModels/best_{}'.format(
                str_model_name),
            max_checkpoints=1,
            tensorboard_verbose=0,
            tensorboard_dir="../tflearnLogs/{}/".format(str_model_name))
    else:
        model = tflearn.DNN(network)

    return model
Exemplo n.º 36
0
    def videoFetch(self,thread_id):
        path = self.path
        img_width = 175
        img_height = 150
        testing_data = []
        start_time = time.time()
        cap = cv2.VideoCapture(path)
        frame_count = cap.get(cv2.CAP_PROP_FRAME_COUNT) - 1
        fragment_size = frame_count/8
        init_frame = math.floor(fragment_size*thread_id)
        end_frame = math.floor(fragment_size*(thread_id+1)-1)
        count = init_frame
        cap.set(cv2.CAP_PROP_POS_FRAMES, init_frame)  # seek to this thread's first frame
        while cap.isOpened():
            ret, frame = cap.read()
            if(ret):
                img = cv2.resize(frame, (img_width,img_height))
                img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
                img_num = "%#05d" % (count+1)
                testing_data.append([np.array(img),img_num])
            count = count+1
            if (count == end_frame):
                end_time = time.time()
                cap.release()
                print ("{} Done extracting frames.\n{} frames found".format(thread_id,end_frame-init_frame))
                print ("It took %d seconds forconversion." % (end_time-start_time))
                break
        # np.save('/home/ghost/Desktop/ecc/test_data_{}.npy'.format(thread_id), testing_data)
        LR = 0.0001
        MODEL_NAME = 'ECR-{}-{}.model'.format(LR, '2conv-basic')
        tf.reset_default_graph()
        convnet = input_data(shape=[None, img_width, img_height, 1], name='input')
        convnet = conv_2d(convnet, 32, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 32, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 32, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 64, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = dropout(convnet, 0.2)
        convnet = conv_2d(convnet, 64, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 64, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 128, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)
        convnet = conv_2d(convnet, 128, 2, activation='relu')
        convnet = max_pool_2d(convnet, 2)

        convnet = flatten(convnet)

        convnet = fully_connected(convnet, 1024, activation='relu')
        convnet = dropout(convnet, 0.4)

        convnet = fully_connected(convnet, 2, activation='softmax')
        convnet = regression(convnet, optimizer='adam', learning_rate=LR, loss='binary_crossentropy', name='targets')

        model = tflearn.DNN(convnet, tensorboard_dir='log')

        if os.path.exists('../{}.meta'.format(MODEL_NAME)):
            model.load('../' + MODEL_NAME)
            print('model loaded!')