def neural_network_model4(IMG_SIZE, LR):
    """Build a 7-class CNN classifier as a tflearn DNN.

    Args:
        IMG_SIZE: height/width of the square, single-channel input images.
        LR: learning rate for the Adam optimizer.

    Returns:
        A compiled ``tflearn.DNN`` model logging to the ``log`` directory.
    """
    convnet = input_data(shape=[None, IMG_SIZE, IMG_SIZE, 1], name='input')
    convnet = conv_2d(convnet, 64, (5, 5), activation='relu')
    convnet = max_pool_2d(convnet, (5, 5), strides=(2, 2))
    convnet = conv_2d(convnet, 64, (3, 3), activation='relu')
    convnet = conv_2d(convnet, 64, (3, 3), activation='relu')
    convnet = avg_pool_2d(convnet, (3, 3), strides=(2, 2))
    convnet = dropout(convnet, 0.7)
    convnet = conv_2d(convnet, 128, (3, 3), activation='relu')
    convnet = conv_2d(convnet, 128, (3, 3), activation='relu')
    convnet = avg_pool_2d(convnet, (3, 3), strides=(2, 2))
    convnet = dropout(convnet, 0.7)
    convnet = flatten(convnet)
    convnet = fully_connected(convnet, 1024, activation='relu')
    convnet = dropout(convnet, 0.7)
    convnet = fully_connected(convnet, 1024, activation='relu')
    convnet = dropout(convnet, 0.7)
    convnet = fully_connected(convnet, 7, activation='softmax')
    # BUG FIX: the original appended a dropout layer AFTER the regression
    # (training) layer and handed that to DNN, so the DNN wrapped a dropout
    # op instead of the training layer. The regression layer must be last.
    convnet = regression(convnet, optimizer='adam', learning_rate=LR,
                         loss='categorical_crossentropy', name='targets')
    model = tflearn.DNN(convnet, tensorboard_dir='log')
    return model
def generate_model():
    """Assemble the 200x200 RGB binary classifier and wrap it in a DNN.

    Returns:
        A ``tflearn.DNN`` whose tensorboard logs go to the module-level
        ``vis_dir`` directory.
    """
    net = input_data(shape=[None, 200, 200, 3], name='input')
    # One (nb_filter, pooling layer) pair per conv/pool stage, in order.
    stages = [
        (32, max_pool_2d),
        (64, avg_pool_2d),
        (128, max_pool_2d),
        (256, max_pool_2d),
        (128, max_pool_2d),
        (64, avg_pool_2d),
        (32, max_pool_2d),
    ]
    for nb_filter, pool in stages:
        net = conv_2d(net, nb_filter, 2, strides=[1, 2, 2, 1],
                      activation='relu')
        net = pool(net, [1, 2, 2, 1])
    net = dropout(net, 0.8)
    net = fully_connected(net, 2, activation='softmax')
    net = regression(net, learning_rate=0.001,
                     loss='categorical_crossentropy', optimizer='adam')
    return tflearn.DNN(net, tensorboard_verbose=3, tensorboard_dir=vis_dir)
def CNNModel(x, reuse=False):
    """Two-stage conv feature extractor: (conv -> avg-pool -> LRN) twice,
    then flattened.

    Args:
        x: input tensor.
        reuse: reuse variables of the named conv scopes ('conv_1'/'conv_2').

    Returns:
        The flattened feature tensor.
    """
    features = conv_2d(x, 8, [32, 1], activation='relu', regularizer="L2",
                       scope='conv_1', reuse=reuse)
    features = avg_pool_2d(features, [1, 2])
    features = local_response_normalization(features)
    features = conv_2d(features, 16, [1, 1], activation='relu',
                       regularizer="L2", scope='conv_2', reuse=reuse)
    features = avg_pool_2d(features, [1, 2])
    features = local_response_normalization(features)
    return flatten(features)
def light_convolution(network, input, hidden_number, default_hidden):
    """Small conv feature extractor over 32x32x3 inputs feeding a hidden stack.

    Args:
        network: opaque context passed through to ``default_hidden``.
        input: flat batch tensor, reshaped internally to (-1, 32, 32, 3).
        hidden_number: forwarded to ``default_hidden``.
        default_hidden: callable ``(network, input, hidden_number) -> (out, _)``.

    Returns:
        Tuple of (hidden-stack output, list of variables created under the
        'light_convolution' scope).
    """
    # NOTE: the original defined a nested helper `central_crop` that was
    # never called; it was dead code and has been removed.
    with tf.variable_scope('light_convolution'):
        input = tf.reshape(input, [-1, 32, 32, 3])
        input = avg_pool_2d(input, 2)
        # Collapse the RGB channels to one mean channel before the convs.
        input = tf.reduce_mean(input, axis=3, keep_dims=True)
        input = conv_2d(input, 32, 3, activation='relu', regularizer='L2')
        input = max_pool_2d(input, 2)
        input = conv_2d(input, 64, 3, activation='relu', regularizer='L2')
        input = max_pool_2d(input, 2)
        # Flatten spatial dims x channels for the fully-connected stack.
        input = tf.reshape(input, [
            -1,
            input.get_shape()[1] * input.get_shape()[2] * input.get_shape()[3]
        ])
        input, _ = default_hidden(network, input, hidden_number)
        variable_scope = tf.get_variable_scope().name
        if variable_scope:
            variable_scope += '/'
        return input, tf.global_variables(variable_scope)
def All_extract(self, input_x, layer_name):
    """Global branch: average-pool the whole feature map, embed it to 256-d
    via a 1x1 conv, and produce class logits.

    Args:
        input_x: 4-D feature map (N, H, W, C) — assumed NHWC; TODO confirm.
        layer_name: prefix used for every op/scope name in this branch.

    Returns:
        (local_x_list, logits_list): one flattened embedding and one logits
        tensor, each wrapped in a single-element list.
    """
    with tf.name_scope(layer_name):
        output_height = int(np.shape(input_x)[1])
        output_weight = int(np.shape(input_x)[2])
        local_x_list = []
        logits_list = []
        # all fig
        # Pool over the full spatial extent -> (N, 1, 1, C).
        local_x1 = avg_pool_2d(input_x[:, :, :, :],
                               (output_height, output_weight),
                               name=layer_name + '_AvgPool2D')
        # local_x1 = global_avg_pool(input_x, name='Global_avg_pooling')
        local_x1 = tf.layers.dropout(inputs=local_x1, rate=self.drop_rate,
                                     training=self.training,
                                     name=layer_name + '_drop1')
        # 1x1 conv reduces channels to a 256-d embedding.
        local_x1 = conv_layer(local_x1, filter=256, kernel=[1, 1],
                              layer_name=layer_name + '_split_conv')
        local_x1 = Batch_Normalization(local_x1, training=self.training,
                                       scope=layer_name + '_batch')
        local_x1 = flatten(local_x1)
        # Embedding is captured BEFORE the second dropout and FC layer.
        local_x_list.append(local_x1)
        local_x1 = tf.layers.dropout(inputs=local_x1, rate=self.drop_rate,
                                     training=self.training,
                                     name=layer_name + '_drop2')
        local_x1 = Fully_connected(local_x1, self.class_num,
                                   layer_name=layer_name + '_fully_connected')
        logits_list.append(local_x1)
    return local_x_list, logits_list
def NiN(input_layer, num_class):
    """Network-in-Network feature tower: three mlpconv blocks (one spatial
    conv followed by two 1x1 convs each), with pooling/dropout between them.

    Note: the final 1x1 conv is hard-wired to 10 output maps and
    ``num_class`` is not referenced — preserved exactly as the original.
    """
    # mlpconv block 1 + max-pool.
    net = conv_2d(input_layer, 192, 5, activation='relu')
    net = conv_2d(net, 160, 1, activation='relu')
    net = conv_2d(net, 96, 1, activation='relu')
    net = max_pool_2d(net, 3, strides=2)
    net = dropout(net, 0.5)
    # mlpconv block 2 + avg-pool.
    net = conv_2d(net, 192, 5, activation='relu')
    net = conv_2d(net, 192, 1, activation='relu')
    net = conv_2d(net, 192, 1, activation='relu')
    net = avg_pool_2d(net, 3, strides=2)
    net = dropout(net, 0.5)
    # mlpconv block 3: 10 class maps, globally averaged then flattened.
    net = conv_2d(net, 192, 3, activation='relu')
    net = conv_2d(net, 192, 1, activation='relu')
    net = conv_2d(net, 10, 1, activation='relu')
    net = avg_pool_2d(net, 8)
    return flatten(net)
def model_lenet5(data):
    """LeNet-5-style classifier producing 10-way logits.

    Args:
        data: input image batch tensor.

    Returns:
        Output tensor of the final 10-unit fully-connected layer.
    """
    layer1_conv = conv_2d(data, nb_filter=6, filter_size=5,
                          strides=[1, 1, 1, 1], activation='relu',
                          padding='SAME', bias=True)
    layer1_pool = avg_pool_2d(layer1_conv, kernel_size=2, strides=2,
                              padding='SAME')
    layer2_conv = conv_2d(layer1_pool, nb_filter=16, filter_size=5,
                          strides=[1, 1, 1, 1], activation='relu',
                          padding='VALID', bias=True)
    layer2_pool = avg_pool_2d(layer2_conv, kernel_size=2, strides=2,
                              padding='SAME')
    flat_layer = flatten(layer2_pool)
    layer3_fccd = fully_connected(flat_layer, n_units=120, activation='relu',
                                  bias=True)
    layer4_fccd = fully_connected(layer3_fccd, n_units=84, activation='relu',
                                  bias=True)
    # BUG FIX: the output layer was wired to layer3_fccd, silently skipping
    # the 84-unit layer4_fccd; feed it from layer4_fccd as LeNet-5 intends.
    # NOTE(review): 'relu' on the output layer looks suspect — the original's
    # commented-out code used a plain affine (matmul + bias) output; confirm
    # before changing the activation.
    logits = fully_connected(layer4_fccd, n_units=10, activation='relu',
                             bias=True)
    return logits
def Net_in_Net1(network):
    """Network-in-Network classifier head over the incoming tensor.

    The final softmax layer is sized by the module-level ``output_dim``.
    Note: both dropout layers use keep_prob=1.0, i.e. dropout is effectively
    disabled — preserved as-is from the original.
    """
    # Block 1: 5x5 mlpconv + max-pool.
    for nb_filter, size in ((192, 5), (160, 1), (96, 1)):
        network = conv_2d(network, nb_filter, size, activation='relu')
    network = max_pool_2d(network, 3, strides=2)
    network = dropout(network, 1.0)
    # Block 2: 5x5 mlpconv + avg-pool.
    for nb_filter, size in ((192, 5), (192, 1), (192, 1)):
        network = conv_2d(network, nb_filter, size, activation='relu')
    network = avg_pool_2d(network, 3, strides=2)
    network = dropout(network, 1.0)
    # Block 3: 3x3 mlpconv ending in 10 feature maps, globally averaged.
    for nb_filter, size in ((192, 3), (192, 1), (10, 1)):
        network = conv_2d(network, nb_filter, size, activation='relu')
    network = avg_pool_2d(network, 8)
    network = flatten(network)
    return fully_connected(network, output_dim, activation='softmax')
def inception_block_a(input_a):
    """Inception-v4 block A: four parallel branches concatenated on channels.

    Branches: 1x1 conv; 1x1->3x3; 1x1->3x3->3x3; 3x3 avg-pool->1x1.
    """
    inception_a_conv1_1_1 = conv_2d(input_a, 96, 1, activation='relu',
                                    name='inception_a_conv1_1_1')
    inception_a_conv1_3_3_reduce = conv_2d(input_a, 64, 1, activation='relu',
                                           name='inception_a_conv1_3_3_reduce')
    inception_a_conv1_3_3 = conv_2d(inception_a_conv1_3_3_reduce, 96, 3,
                                    activation='relu',
                                    name='inception_a_conv1_3_3')
    inception_a_conv2_3_3_reduce = conv_2d(input_a, 64, 1, activation='relu',
                                           name='inception_a_conv2_3_3_reduce')
    # BUG FIX: both convs of this branch were named 'inception_a_conv2_3_3';
    # the first now carries its own distinct layer name.
    inception_a_conv2_3_3_sym_1 = conv_2d(inception_a_conv2_3_3_reduce, 96, 3,
                                          activation='relu',
                                          name='inception_a_conv2_3_3_sym_1')
    inception_a_conv2_3_3 = conv_2d(inception_a_conv2_3_3_sym_1, 96, 3,
                                    activation='relu',
                                    name='inception_a_conv2_3_3')
    inception_a_pool = avg_pool_2d(input_a, kernel_size=3,
                                   name='inception_a_pool', strides=1)
    inception_a_pool_1_1 = conv_2d(inception_a_pool, 96, 1, activation='relu',
                                   name='inception_a_pool_1_1')
    # merge inception_a
    inception_a = merge([
        inception_a_conv1_1_1, inception_a_conv1_3_3, inception_a_conv2_3_3,
        inception_a_pool_1_1
    ], mode='concat', axis=3)
    return inception_a
def model(img):
    """Classify one image with a LeNet-5-style color CNN and return its label.

    Builds the network, loads previously trained weights when a checkpoint
    for MODEL_NAME exists, predicts on ``img``, and returns the matching
    entry of the module-level ``items`` list.
    """
    MODEL_NAME = 'LeNet_5_COLOR'
    IMG_SIZE = 100
    # LeNet-5 topology: two conv/avg-pool stages, then a 120-84-10 dense head.
    net = input_data(shape=[None, IMG_SIZE, IMG_SIZE, 3], name='input')
    net = conv_2d(net, nb_filter=6, filter_size=5, strides=1,
                  activation='relu')
    net = avg_pool_2d(net, kernel_size=2, strides=2)
    net = conv_2d(net, nb_filter=16, filter_size=5, strides=1,
                  activation='relu')
    net = avg_pool_2d(net, kernel_size=2, strides=2)
    net = fully_connected(net, 120, activation='relu')
    net = fully_connected(net, 84, activation='relu')
    net = fully_connected(net, 10, activation='softmax')
    net = regression(net, optimizer='adam', learning_rate=0.001,
                     loss='categorical_crossentropy', name='targets')
    model = tflearn.DNN(net, tensorboard_dir='log', tensorboard_verbose=3)
    # Only load weights when the checkpoint's .meta file is present.
    checkpoint_meta = (
        '/home/manish/Manish/Hackathon/google-images-download-master/{}.meta'
        .format(MODEL_NAME))
    if os.path.exists(checkpoint_meta):
        model.load(MODEL_NAME)
        print('Model Loaded')
    model_out = model.predict([img])[0]
    print(model_out)
    label = np.argmax(model_out)
    print(items[label])
    print(label)
    return items[label]
def build_network_in_network(self, network):
    """Build a Network-in-Network classifier (two output maps) on top of
    ``network``, finishing with an Adam regression training layer.

    Returns:
        The regression layer, ready to be wrapped in a ``tflearn.DNN``.
    """
    # mlpconv stack 1 -> max-pool -> dropout.
    for nb_filter, size in ((192, 5), (160, 1), (96, 1)):
        network = conv_2d(network, nb_filter, size, activation='relu')
    network = max_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    # mlpconv stack 2 -> avg-pool -> dropout.
    for nb_filter, size in ((192, 5), (192, 1), (192, 1)):
        network = conv_2d(network, nb_filter, size, activation='relu')
    network = avg_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    # mlpconv stack 3: two class maps, global 8x8 average, flatten.
    for nb_filter, size in ((192, 3), (192, 1), (2, 1)):
        network = conv_2d(network, nb_filter, size, activation='relu')
    network = avg_pool_2d(network, 8)
    network = flatten(network)
    return regression(network, optimizer='adam',
                      loss='softmax_categorical_crossentropy',
                      learning_rate=0.001)
def net(input_image):
    """Multi-scale pyramid generator: processes ``input_image`` at six
    resolutions (1/32 down to 1/1), progressively merging each coarser
    branch into the next finer one, ending in a 3-channel output conv.

    NOTE(review): "batch norm" here is tf.nn.batch_normalization with fixed
    mean=0, variance=1.0, offset=0.0, scale=1.0 — a fixed affine transform,
    not learned batch norm; confirm this is intended.
    """
    ratios = [32, 16, 8, 4, 2, 1]  # down-sampling factor per pyramid level
    conv_num = 8  # base channel count
    network = []
    for i in range(len(ratios)):
        # Branch i starts from the input average-pooled by ratios[i].
        network.append(avg_pool_2d(input_image, ratios[i]))
        # block_i_0, block_i_1, block_i_2
        for block in range(3):
            with tf.name_scope('block_%d_%d' % (i, block)):
                # Every third conv is 1x1; the others are 3x3.
                filter_size = 1 if (block + 1) % 3 == 0 else 3
                network[i] = tflearn.conv_2d(network[i],
                                             nb_filter=conv_num,
                                             filter_size=filter_size,
                                             weights_init='xavier',
                                             name='Conv_%d_%d' % (i, block))
                network[i] = tf.nn.batch_normalization(network[i], 0, 1.0,
                                                       0.0, 1.0, 1e-5,
                                                       'BatchNorm')
                network[i] = tflearn.activations.leaky_relu(network[i])
        if i == 0:
            # Coarsest level: just upsample for the next level to consume.
            network[i] = tflearn.upsample_2d(network[i], 2)
        else:
            # Fuse the (already upsampled) previous level with this one.
            upnet = tf.nn.batch_normalization(network[i - 1], 0, 1.0, 0.0,
                                              1.0, 1e-5, 'BatchNorm')
            downnet = tf.nn.batch_normalization(network[i], 0, 1.0, 0.0, 1.0,
                                                1e-5, 'BatchNorm')
            # join_i
            network[i] = tflearn.merge([upnet, downnet], 'concat', axis=3)
            # block_i_3, block_i_4, block_i_5: widen to conv_num * i channels.
            for block in range(3, 6):
                with tf.name_scope('block_%d_%d' % (i, block)):
                    filter_size = 1 if (block + 1) % 3 == 0 else 3
                    network[i] = tflearn.conv_2d(network[i],
                                                 nb_filter=conv_num * i,
                                                 filter_size=filter_size,
                                                 weights_init='xavier',
                                                 name='Conv_%d_%d' %
                                                 (i, block))
                    network[i] = tf.nn.batch_normalization(
                        network[i], 0, 1.0, 0.0, 1.0, 1e-5, 'BatchNorm')
                    network[i] = tflearn.activations.leaky_relu(network[i])
            # All but the finest level get upsampled for the next merge.
            if i != len(ratios) - 1:
                network[i] = tflearn.upsample_2d(network[i], 2)
    # Final 1x1 conv maps the finest branch to 3 output channels.
    network[len(ratios) - 1] = tflearn.conv_2d(network[len(ratios) - 1],
                                               nb_filter=3,
                                               filter_size=1,
                                               weights_init='xavier',
                                               name='Conv2d_out')
    return network[len(ratios) - 1]
def Net_in_Net1(network, scale=False):
    """Network-in-Network classifier with a sigmoid ``output_dim``-way head.

    Args:
        network: input tensor.
        scale: when True, pre-scale the input with the module-level
            ``scale()`` helper.
    """
    if scale is True:
        # BUG FIX: the parameter `scale` shadows the module-level scale()
        # function, so the original call `scale(network)` invoked a bool and
        # raised TypeError. Look the helper up in module globals instead.
        network = globals()['scale'](network)
    network = conv_2d(network, 192, 5, activation='relu')
    network = conv_2d(network, 160, 1, activation='relu')
    network = conv_2d(network, 96, 1, activation='relu')
    network = max_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    network = conv_2d(network, 192, 5, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = avg_pool_2d(network, 3, strides=2)
    network = dropout(network, 0.5)
    network = conv_2d(network, 192, 3, activation='relu')
    network = conv_2d(network, 192, 1, activation='relu')
    network = conv_2d(network, 10, 1, activation='relu')
    network = avg_pool_2d(network, 8)
    network = flatten(network)
    network = fully_connected(network, output_dim, activation='sigmoid')
    return network
def Half_extract(input_x, layer_name, class_num, training, drop_rate=0.2):
    """Split the feature map into upper/lower halves and compute a 256-d
    embedding plus class logits for each half (PCB-style part pooling).

    Args:
        input_x: 4-D feature map (N, H, W, C) — assumed NHWC; TODO confirm.
        layer_name: prefix used for every op/scope name created here.
        class_num: number of classes for the logits layers.
        training: batch-norm / dropout training flag.
        drop_rate: dropout rate applied before the 1x1 conv and the FC.

    Returns:
        (local_x_list, logits_list): two embeddings and two logits tensors,
        one per half.
    """
    with tf.name_scope(layer_name):
        output_height = int(np.shape(input_x)[1])
        output_weight = int(np.shape(input_x)[2])
        # Each stripe spans the full width and half the height.
        stripe_w2 = output_weight
        stripe_h2 = int(output_height / 2)
        local_x_list = []
        logits_list = []
        # Shared BN + ReLU on the input before the halves are split.
        input_x = Batch_Normalization(input_x, training=training,
                                      scope=layer_name + '_batch_norm')
        input_x = tf.nn.relu(input_x, name=layer_name + 'relu')
        for i in range(2):  # up and down
            # Average-pool stripe i down to (N, 1, 1, C).
            local_x2 = avg_pool_2d(
                input_x[:, i * stripe_h2:(i + 1) * stripe_h2, :, :],
                (stripe_h2, stripe_w2))
            local_x2 = tf.layers.dropout(inputs=local_x2, rate=drop_rate,
                                         training=training,
                                         name=layer_name + '_drop1')
            # 1x1 conv: C -> 256 embedding.
            local_x2 = conv_layer(local_x2, filter=256, kernel=[1, 1],
                                  layer_name=layer_name + '_split_conv' +
                                  str(i))
            local_x2 = Batch_Normalization(local_x2, training=training,
                                           scope=layer_name + '_batch' +
                                           str(i))
            local_x2 = flatten(local_x2)
            # Embedding is captured BEFORE the second dropout and FC layer.
            local_x_list.append(local_x2)
            local_x2 = tf.layers.dropout(inputs=local_x2, rate=drop_rate,
                                         training=training,
                                         name=layer_name + '_drop2')
            local_x2 = Fully_connected(local_x2, class_num, use_bias=False,
                                       layer_name=layer_name +
                                       '_fully_connected_' + str(i))
            logits_list.append(local_x2)
    return local_x_list, logits_list
def Part_extract(self, input_x, layer_name):
    """Split the feature map into six horizontal stripes (PCB-style) and
    compute a 256-d embedding plus class logits for each stripe.

    Args:
        input_x: 4-D feature map (N, H, W, C) — assumed NHWC; TODO confirm.
        layer_name: prefix used for every op/scope name created here.

    Returns:
        (local_x_list, logits_list): six embeddings and six logits tensors.
    """
    with tf.name_scope(layer_name):
        output_height = int(np.shape(input_x)[1])
        output_weight = int(np.shape(input_x)[2])
        stripe_w = output_weight
        # Stripe height = 1/6 of the map; remainder rows (if any) are dropped.
        stripe_h = int(output_height / 6)
        local_x_list = []
        logits_list = []
        for i in range(6):  # 6 parts
            # Average-pool stripe i over its full spatial extent.
            local_x = avg_pool_2d(
                input_x[:, i * stripe_h:(i + 1) * stripe_h, :, :],
                (stripe_h, stripe_w))
            # print('shape of local_x: ', np.shape(local_x))
            local_x = tf.layers.dropout(inputs=local_x, rate=self.drop_rate,
                                        training=self.training,
                                        name=layer_name + '_drop1')
            # 1x1 conv: C -> 256 embedding.
            local_x = conv_layer(local_x, filter=256, kernel=[1, 1],
                                 layer_name=layer_name + '_split_conv' +
                                 str(i))
            # print('shape of local_x after conv: ', np.shape(local_x))
            local_x = Batch_Normalization(local_x, training=self.training,
                                          scope=layer_name + '_batch' +
                                          str(i))
            local_x = flatten(local_x)
            # print('shape of local_x1: ', np.shape(local_x))
            # Embedding is captured BEFORE the second dropout and FC layer.
            local_x_list.append(local_x)
            local_x = tf.layers.dropout(inputs=local_x, rate=self.drop_rate,
                                        training=self.training,
                                        name=layer_name + '_drop2')
            local_x = Fully_connected(local_x, self.class_num,
                                      layer_name=layer_name +
                                      '_fully_connected_' + str(i))
            # print('shape of local_x after FC: ', np.shape(local_x))
            logits_list.append(local_x)
        # print('shape of logits_list: ', np.shape(logits_list))
    return local_x_list, logits_list
def Part_extract(input_x, layer_name, class_num, training, num_dims=256,
                 drop_rate=0.2):
    """Split the feature map into six horizontal stripes (PCB-style) and
    compute a ``num_dims``-d embedding plus class logits for each stripe.

    Args:
        input_x: 4-D feature map (N, H, W, C) — assumed NHWC; TODO confirm.
        layer_name: prefix used for every op/scope name created here.
        class_num: number of classes for the logits layers.
        training: batch-norm / dropout training flag.
        num_dims: embedding width produced by the 1x1 conv.
        drop_rate: dropout rate (applies to '_drop1' only; '_drop2' is
            commented out).

    Returns:
        (local_x_list, logits_list): six embeddings and six logits tensors.
    """
    with tf.name_scope(layer_name):
        output_height = int(np.shape(input_x)[1])
        output_weight = int(np.shape(input_x)[2])
        stripe_w = output_weight
        # Stripe height = 1/6 of the map; remainder rows (if any) are dropped.
        stripe_h = int(output_height / 6)
        local_x_list = []
        logits_list = []
        for i in range(6):  # 6 parts
            # Average-pool stripe i over its full spatial extent.
            local_x = avg_pool_2d(
                input_x[:, i * stripe_h:(i + 1) * stripe_h, :, :],
                (stripe_h, stripe_w))
            local_x = tf.layers.dropout(local_x, rate=drop_rate,
                                        training=training,
                                        name=layer_name + '_drop1')
            # 1x1 conv: C -> num_dims embedding.
            local_x = conv_layer(local_x, filter=num_dims, kernel=[1, 1],
                                 layer_name=layer_name + '_split_conv' +
                                 str(i))
            local_x = Batch_Normalization(local_x, training=training,
                                          scope=layer_name + '_batch' +
                                          str(i))
            local_x = flatten(local_x)
            # Embedding is captured BEFORE the (disabled) dropout and FC.
            local_x_list.append(local_x)
            # local_x = tf.layers.dropout(local_x, rate=drop_rate, training=training,
            #                             name=layer_name + '_drop2')
            local_x = Fully_connected(local_x, class_num, use_bias=False,
                                      layer_name=layer_name +
                                      '_fully_connected_' + str(i))
            logits_list.append(local_x)
    return local_x_list, logits_list
def inception_block_c(input_c):
    """Inception-v4 block C: a 1x1 branch, a factorized "3x3" branch, a
    factorized "5x5" branch (splitting into parallel 1x3/3x1 heads), and an
    avg-pool -> 1x1 branch, all concatenated along channels."""
    inception_c_1_1 = conv_2d(input_c, 256, 1, activation='relu',
                              name='inception_c_1_1')
    inception_c_3_3_reduce = conv_2d(input_c, 384, filter_size=1,
                                     activation='relu',
                                     name='inception_c_3_3_reduce')
    # "3x3" branch splits into two parallel asymmetric convs.
    inception_c_3_3_asym_1 = conv_2d(inception_c_3_3_reduce, 256,
                                     filter_size=[1, 3], activation='relu',
                                     name='inception_c_3_3_asym_1')
    inception_c_3_3_asym_2 = conv_2d(inception_c_3_3_reduce, 256,
                                     filter_size=[3, 1], activation='relu',
                                     name='inception_c_3_3_asym_2')
    inception_c_3_3 = merge([inception_c_3_3_asym_1, inception_c_3_3_asym_2],
                            mode='concat', axis=3)
    inception_c_5_5_reduce = conv_2d(input_c, 384, filter_size=1,
                                     activation='relu',
                                     name='inception_c_5_5_reduce')
    # BUG FIX: this conv was the only one in the block without an activation
    # (tflearn's conv_2d defaults to linear); give it ReLU like its siblings.
    inception_c_5_5_asym_1 = conv_2d(inception_c_5_5_reduce, 448,
                                     filter_size=[1, 3], activation='relu',
                                     name='inception_c_5_5_asym_1')
    inception_c_5_5_asym_2 = conv_2d(inception_c_5_5_asym_1, 512,
                                     filter_size=[3, 1], activation='relu',
                                     name='inception_c_5_5_asym_2')
    # "5x5" branch also ends in two parallel asymmetric heads.
    inception_c_5_5_asym_3 = conv_2d(inception_c_5_5_asym_2, 256,
                                     filter_size=[1, 3], activation='relu',
                                     name='inception_c_5_5_asym_3')
    inception_c_5_5_asym_4 = conv_2d(inception_c_5_5_asym_2, 256,
                                     filter_size=[3, 1], activation='relu',
                                     name='inception_c_5_5_asym_4')
    inception_c_5_5 = merge([inception_c_5_5_asym_4, inception_c_5_5_asym_3],
                            mode='concat', axis=3)
    inception_c_pool = avg_pool_2d(input_c, kernel_size=3, strides=1)
    inception_c_pool_1_1 = conv_2d(inception_c_pool, 256, filter_size=1,
                                   activation='relu',
                                   name='inception_c_pool_1_1')
    # merge the inception_c
    inception_c_output = merge([inception_c_1_1, inception_c_3_3,
                                inception_c_5_5, inception_c_pool_1_1],
                               mode='concat', axis=3)
    return inception_c_output
def inception_block_b(input_b):
    """Inception-v4 block B: a 1x1 branch, a 1x7/7x1-factorized "3x3"
    branch, a doubly-factorized "5x5" branch, and an avg-pool -> 1x1
    branch, all concatenated along channels.

    NOTE(review): the four convs of the 5x5 branch (asym_1..asym_3 and the
    final 1x7) pass no ``activation`` and therefore default to linear in
    tflearn, unlike the ReLU convs elsewhere in this block — confirm this
    is intentional.
    """
    inception_b_1_1 = conv_2d(input_b, 384, 1, activation='relu',
                              name='inception_b_1_1')
    inception_b_3_3_reduce = conv_2d(input_b, 192, filter_size=1,
                                     activation='relu',
                                     name='inception_b_3_3_reduce')
    inception_b_3_3_asym_1 = conv_2d(inception_b_3_3_reduce, 224,
                                     filter_size=[1, 7], activation='relu',
                                     name='inception_b_3_3_asym_1')
    inception_b_3_3 = conv_2d(inception_b_3_3_asym_1, 256,
                              filter_size=[7, 1], activation='relu',
                              name='inception_b_3_3')
    inception_b_5_5_reduce = conv_2d(input_b, 192, filter_size=1,
                                     activation='relu',
                                     name='inception_b_5_5_reduce')
    inception_b_5_5_asym_1 = conv_2d(inception_b_5_5_reduce, 192,
                                     filter_size=[7, 1],
                                     name='inception_b_5_5_asym_1')
    inception_b_5_5_asym_2 = conv_2d(inception_b_5_5_asym_1, 224,
                                     filter_size=[1, 7],
                                     name='inception_b_5_5_asym_2')
    inception_b_5_5_asym_3 = conv_2d(inception_b_5_5_asym_2, 224,
                                     filter_size=[7, 1],
                                     name='inception_b_5_5_asym_3')
    inception_b_5_5 = conv_2d(inception_b_5_5_asym_3, 256,
                              filter_size=[1, 7], name='inception_b_5_5')
    inception_b_pool = avg_pool_2d(input_b, kernel_size=3, strides=1)
    inception_b_pool_1_1 = conv_2d(inception_b_pool, 128, filter_size=1,
                                   activation='relu',
                                   name='inception_b_pool_1_1')
    # merge the inception_b
    inception_b_output = merge([inception_b_1_1, inception_b_3_3,
                                inception_b_5_5, inception_b_pool_1_1],
                               mode='concat', axis=3)
    return inception_b_output
def All_extract(input_x, layer_name, class_num, training, num_dims=256,
                drop_rate=0.2):
    """Global branch: average-pool the whole feature map, embed it to
    ``num_dims`` via a 1x1 conv, and compute class logits.

    Args:
        input_x: 4-D feature map (N, H, W, C) — assumed NHWC; TODO confirm.
        layer_name: prefix for every op/scope name.
        class_num: logits dimensionality.
        training: batch-norm training flag.
        num_dims: embedding width of the 1x1 conv.
        drop_rate: unused while both dropout calls stay commented out.

    Returns:
        (local_x_list, logits_list), each a single-element list.
    """
    with tf.name_scope(layer_name):
        output_height = int(np.shape(input_x)[1])
        output_weight = int(np.shape(input_x)[2])
        local_x_list = []
        logits_list = []
        # Pool the entire spatial extent -> (N, 1, 1, C).
        local_x1 = avg_pool_2d(input_x[:, :, :, :],
                               (output_height, output_weight),
                               name=layer_name + '_AvgPool2D')
        # local_x1 = tf.layers.dropout(local_x1, rate=drop_rate, training=training,
        #                              name=layer_name + '_drop1')
        local_x1 = conv_layer(local_x1, filter=num_dims, kernel=[1, 1],
                              layer_name=layer_name + '_split_conv')
        local_x1 = Batch_Normalization(local_x1, training=training,
                                       scope=layer_name + '_batch')
        local_x1 = flatten(local_x1)
        # Embedding is captured BEFORE the FC logits layer.
        local_x_list.append(local_x1)
        # local_x1 = tf.layers.dropout(local_x1, rate=drop_rate, training=training,
        #                              name=layer_name + '_drop2')
        local_x1 = Fully_connected(local_x1, class_num, use_bias=False,
                                   layer_name=layer_name +
                                   '_fully_connected')
        logits_list.append(local_x1)
    return local_x_list, logits_list
def getReseau():
    """Build one of five CNN architectures selected by ``settings.reseau``
    and wrap it in a tflearn DNN.

    The input is a ``settings.size`` x ``settings.size`` RGB image; the
    output layer has ``ld.getLabelsNumber()`` softmax units. Feature-wise
    normalization and flip/rotate/blur augmentation are applied to inputs.

    Returns:
        A compiled ``tflearn.DNN`` model.
    """
    size = settings.size
    nb_filter = settings.nb_filter
    filter_size = settings.filter_size
    # Make sure the data is normalized
    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()
    # Create extra synthetic training data by flipping, rotating and blurring the
    # images on our data set.
    img_aug = ImageAugmentation()
    img_aug.add_random_flip_leftright()
    img_aug.add_random_rotation(max_angle=25.)
    img_aug.add_random_blur(sigma_max=3.)
    # Define our network architecture:
    # Input is a size x size image with 3 color channels (red, green and blue)
    network = input_data(shape=[None, size, size, 3],
                         data_preprocessing=img_prep,
                         data_augmentation=img_aug)
    reseau = settings.reseau
    if reseau == 1:
        # Step 1: Convolution
        network = conv_2d(network, nb_filter, filter_size, activation='relu')
        # Step 2: Max pooling
        network = max_pool_2d(network, 2)
        # Step 3: Convolution again
        network = conv_2d(network, nb_filter * 2, filter_size,
                          activation='relu')
        # Step 4: Convolution yet again
        network = conv_2d(network, nb_filter * 2, filter_size,
                          activation='relu')
        # Step 5: Max pooling again
        network = max_pool_2d(network, 2)
        # Step 6: Fully-connected 512 node neural network
        network = fully_connected(network, nb_filter * 8, activation='relu')
        # Step 7: Dropout - throw away some data randomly during training to prevent over-fitting
        network = dropout(network, 0.5)
    elif reseau == 2:
        # Two conv/conv/max-pool stages, then two 512-unit dense layers.
        network = conv_2d(network, 32, 3, activation='relu')
        network = conv_2d(network, 32, 3, activation='relu')
        network = max_pool_2d(network, 2)
        network = conv_2d(network, 32, 3, activation='relu')
        network = conv_2d(network, 32, 3, activation='relu')
        network = max_pool_2d(network, 2)
        network = fully_connected(network, 512, activation='relu')
        network = fully_connected(network, 512, activation='relu')
    elif reseau == 3:
        # Three conv/avg-pool stages, two dense layers, dropout.
        network = conv_2d(network, 32, 3, activation='relu')
        network = avg_pool_2d(network, 2)
        network = conv_2d(network, 32, 3, activation='relu')
        network = avg_pool_2d(network, 2)
        network = conv_2d(network, 32, 3, activation='relu')
        network = avg_pool_2d(network, 2)
        network = fully_connected(network, 512, activation='relu')
        network = fully_connected(network, 512, activation='relu')
        network = dropout(network, 0.5)
    elif reseau == 4:
        # All-convolutional stack (two 'valid' 5x5 stages), one dense layer.
        network = conv_2d(network, 32, 3, activation='relu')
        network = conv_2d(network, 32, 3, activation='relu')
        network = conv_2d(network, 32, 5, padding='valid', activation='relu')
        network = conv_2d(network, 32, 3, activation='relu')
        network = conv_2d(network, 32, 3, activation='relu')
        network = conv_2d(network, 32, 5, padding='valid', activation='relu')
        network = fully_connected(network, 512, activation='relu')
        network = dropout(network, 0.5)
    elif reseau == 5:
        # Wider first stage (64 filters) with avg then max pooling.
        network = conv_2d(network, 64, 3, activation='relu')
        network = conv_2d(network, 64, 3, activation='relu')
        network = avg_pool_2d(network, 2)
        network = conv_2d(network, 32, 3, activation='relu')
        network = conv_2d(network, 32, 3, activation='relu')
        network = max_pool_2d(network, 2)
        network = fully_connected(network, 512, activation='relu')
        network = fully_connected(network, 512, activation='relu')
    # Step 8: Fully-connected neural network with label-count outputs to make the final prediction
    network = fully_connected(network, ld.getLabelsNumber(),
                              activation='softmax')
    # Tell tflearn how we want to train the network
    network = regression(network, optimizer='adam',
                         loss='categorical_crossentropy',
                         learning_rate=settings.learning_rate)
    # Wrap the network in a model object
    # model = tflearn.DNN(network, tensorboard_verbose=0, checkpoint_path='dataviz-classifier.tfl.ckpt')
    model = tflearn.DNN(
        network, tensorboard_verbose=0
    )  # , checkpoint_path='data-classifier/dataviz-classifier.tfl.ckpt')
    return model
activation='prelu', name='conv4_3') pool4_3 = max_pool_2d(conv4_3, 3, strides=2) #--------------------------------------------------------- inception_5_output = merge([inception_4_1, inception_4_2, pool4_3], mode='concat', axis=3) pool5 = max_pool_2d(inception_5_output, kernel_size=5, strides=3) conv5 = conv_2d(pool5, 64, 2, strides=1, activation='prelu', name='conv5') conv5 = local_response_normalization(conv5) conv5 = conv_2d(conv5, 64, 2, strides=1, activation='prelu') conv5 = batch_normalization(conv5) pool6 = avg_pool_2d(conv5, 2, strides=1) net = dropout(pool6, DROPOUT_PROB) loss = fully_connected(net, NUM_CLASS, activation='softmax') network2 = regression(loss, optimizer='momentum', loss='categorical_crossentropy', learning_rate=0.001) model = tflearn.DNN(network2, tensorboard_dir='/home/guest/dujinhong/model1/', checkpoint_path='/home/guest/dujinhong/model1/', max_checkpoints=1, tensorboard_verbose=0) model.load('/home/guest/dujinhong/model1/DNN_trainer2', verbose=True) model.fit(train_data,
def inception_v2(width, height, learning_rate):
    """Build an Inception-ResNet-style 17-class classifier as a tflearn DNN.

    Relies on the module-level ``block35``, ``block17`` and ``block8``
    residual block builders and ``repeat``. Despite the function name, the
    layer names ('Conv2d_7b_1x1', 'inception_resnet_v2' checkpoint path)
    follow the Inception-ResNet-v2 layout.

    NOTE(review): several layers reuse the same ``name`` (e.g. three convs
    named 'Conv2d_0a_1x1', two pools named 'MaxPool_1a_3x3') — confirm the
    duplication is harmless in this tflearn version.

    Args:
        width, height: spatial dimensions of the RGB input.
        learning_rate: RMSprop learning rate.

    Returns:
        A compiled ``tflearn.DNN`` model.
    """
    num_classes = 17
    dropout_keep_prob = 0.8
    network = input_data(shape=[None, width, height, 3])
    # ----- Stem: plain conv/pool reduction down to the 5b grid. -----
    conv1a_3_3 = relu(
        batch_normalization(
            conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID',
                    activation=None, name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(
        batch_normalization(
            conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID',
                    activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(
        batch_normalization(
            conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None,
                    name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID',
                                name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(
        batch_normalization(
            conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID',
                    activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(
        batch_normalization(
            conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID',
                    activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID',
                                name='MaxPool_5a_3x3')
    # ----- Mixed 5b: four parallel towers merged on channels. -----
    tower_conv = relu(
        batch_normalization(
            conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None,
                    name='Conv2d_5b_b0_1x1')))
    tower_conv1_0 = relu(
        batch_normalization(
            conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None,
                    name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None,
                    name='Conv2d_5b_b1_0b_5x5')))
    tower_conv2_0 = relu(
        batch_normalization(
            conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None,
                    name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(
        batch_normalization(
            conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None,
                    name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(
        batch_normalization(
            conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None,
                    name='Conv2d_5b_b2_0c_3x3')))
    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same',
                                name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(
        batch_normalization(
            conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None,
                    name='Conv2d_5b_b3_0b_1x1')))
    tower_5b_out = merge(
        [tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1],
        mode='concat', axis=3)
    # 10 x block35 residual units (scale 0.17).
    net = repeat(tower_5b_out, 10, block35, scale=0.17)
    # ----- Reduction A (6a): stride-2 towers + max-pool, concatenated. -----
    tower_conv = relu(
        batch_normalization(
            conv_2d(net, 384, 3, bias=False, strides=2, activation=None,
                    padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(
        batch_normalization(
            conv_2d(net, 256, 1, bias=False, activation=None,
                    name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None,
                    name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(
        batch_normalization(
            conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2,
                    padding='VALID', activation=None,
                    name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID',
                             name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat',
                axis=3)
    # 20 x block17 residual units (scale 0.1).
    net = repeat(net, 20, block17, scale=0.1)
    # ----- Reduction B: four towers + max-pool, concatenated. -----
    tower_conv = relu(
        batch_normalization(
            conv_2d(net, 256, 1, bias=False, activation=None,
                    name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(
        batch_normalization(
            conv_2d(tower_conv, 384, 3, bias=False, strides=2,
                    padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(
        batch_normalization(
            conv_2d(net, 256, 1, bias=False, padding='VALID',
                    activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv1, 288, 3, bias=False, strides=2,
                    padding='VALID', activation=None,
                    name='COnv2d_1a_3x3')))
    tower_conv2 = relu(
        batch_normalization(
            conv_2d(net, 256, 1, bias=False, activation=None,
                    name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(
        batch_normalization(
            conv_2d(tower_conv2, 288, 3, bias=False, name='Conv2d_0b_3x3',
                    activation=None)))
    tower_conv2_2 = relu(
        batch_normalization(
            conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2,
                    padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID',
                             name='MaxPool_1a_3x3')
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool],
                mode='concat', axis=3)
    # 9 x block8 residual units, plus one final linear block8.
    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)
    net = relu(
        batch_normalization(
            conv_2d(net, 1536, 1, bias=False, activation=None,
                    name='Conv2d_7b_1x1')))
    # Global average pool over whatever spatial size remains.
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2,
                      padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes, activation='softmax')
    network = tflearn.regression(loss, optimizer='RMSprop',
                                 loss='categorical_crossentropy',
                                 learning_rate=learning_rate)
    #learning_rate=0.0001)
    model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2',
                        max_checkpoints=1, tensorboard_verbose=2,
                        tensorboard_dir="./tflearn_logs/")
    return model
(X, Y), (X_test, Y_test) = cifar10.load_data() X, Y = shuffle(X, Y) Y = to_categorical(Y, 10) Y_test = to_categorical(Y_test, 10) # Building 'Network In Network' network = input_data(shape=[None, 32, 32, 3]) network = conv_2d(network, 192, 5, activation='relu') network = conv_2d(network, 160, 1, activation='relu') network = conv_2d(network, 96, 1, activation='relu') network = max_pool_2d(network, 3, strides=2) network = dropout(network, 0.5) network = conv_2d(network, 192, 5, activation='relu') network = conv_2d(network, 192, 1, activation='relu') network = conv_2d(network, 192, 1, activation='relu') network = avg_pool_2d(network, 3, strides=2) network = dropout(network, 0.5) network = conv_2d(network, 192, 3, activation='relu') network = conv_2d(network, 192, 1, activation='relu') network = conv_2d(network, 10, 1, activation='relu') network = avg_pool_2d(network, 8) network = flatten(network) network = regression(network, optimizer='adam', loss='softmax_categorical_crossentropy', learning_rate=0.001) # training model = tflearn.DNN(network) model.fit(X, Y,
def _model5():
    """Build and train an Inception-ResNet-v2-style classifier with TFLearn.

    Reads module-level globals: X, Y (training data/labels), xTest, yTest
    (validation data), inputSize, dim (input geometry), img_aug, epochNum,
    batchNum, modelStore, _id.  Trains via model.fit() and optionally saves;
    returns nothing.
    """
    global yTest, img_aug
    # Fresh graph per call so repeated invocations don't collide on layer names.
    tf.reset_default_graph()
    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()

    def block35(net, scale=1.0, activation="relu"):
        # Inception-ResNet-A block: three parallel towers, concatenated,
        # projected back to the input depth, added to `net` as a scaled residual.
        tower_conv = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 32, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 48, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 64, 3, bias=False, activation=None, name='Conv2d_0c_3x3')))
        tower_mixed = merge([tower_conv, tower_conv1_1, tower_conv2_2], mode='concat', axis=3)
        # 1x1 projection to net's channel count so the residual add is shape-valid.
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block17(net, scale=1.0, activation="relu"):
        # Inception-ResNet-B block: 1x1 tower plus factorized 1x7/7x1 tower.
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv_1_0 = relu(batch_normalization(conv_2d(net, 128, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv_1_1 = relu(batch_normalization(conv_2d(tower_conv_1_0, 160, [1, 7], bias=False, activation=None, name='Conv2d_0b_1x7')))
        tower_conv_1_2 = relu(batch_normalization(conv_2d(tower_conv_1_1, 192, [7, 1], bias=False, activation=None, name='Conv2d_0c_7x1')))
        tower_mixed = merge([tower_conv, tower_conv_1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block8(net, scale=1.0, activation="relu"):
        # Inception-ResNet-C block: 1x1 tower plus factorized 1x3/3x1 tower.
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 224, [1, 3], bias=False, activation=None, name='Conv2d_0b_1x3')))
        # NOTE(review): unlike the sibling convs this one omits activation=None
        # (falls back to the conv_2d default) — confirm intentional.
        tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 256, [3, 1], bias=False, name='Conv2d_0c_3x1')))
        tower_mixed = merge([tower_conv, tower_conv1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    num_classes = len(Y[0])
    dropout_keep_prob = 0.8
    network = input_data(shape=[None, inputSize, inputSize, dim], name='input', data_preprocessing=img_prep, data_augmentation=img_aug)
    # Stem: strided/VALID convs and pools shrink spatial size before the towers.
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID', activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID', activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID', activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')
    # Mixed_5b: four parallel branches concatenated on the channel axis.
    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))
    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0c_3x3')))
    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None, name='Conv2d_5b_b3_0b_1x1')))
    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)
    net = repeat(tower_5b_out, 10, block35, scale=0.17)
    # Dead code kept by the author: the original full-size (3x3) reduction blocks.
    '''
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)
    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 3, bias=False, strides=2, padding='VALID', activation=None, name='COnv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 3, bias=False, name='Conv2d_0b_3x3', activation=None)))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    '''
    # NOTE(review): this live section uses kernel/pool size 1 although the layer
    # names still say 3x3 (the 3x3 originals are in the string above) —
    # presumably shrunk for a small input resolution; confirm with the author.
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 1, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 20, block17, scale=0.1)
    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 1, bias=False, strides=2, padding='VALID', activation=None, name='COnv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 1, bias=False, name='Conv2d_0b_3x3', activation=None)))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    ####
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool], mode='concat', axis=3)
    net = repeat(net, 9, block8, scale=0.2)
    # Final block8 applied without activation, per the reference architecture.
    net = block8(net, activation=None)
    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    # Global average pool: kernel sized to whatever spatial extent remains.
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes, activation='softmax')
    network = tflearn.regression(loss, optimizer='RMSprop', loss='categorical_crossentropy', learning_rate=0.0001)
    model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2', max_checkpoints=1, tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")
    model.fit(X, Y, n_epoch=epochNum, validation_set=(xTest, yTest), shuffle=True, show_metric=True, batch_size=batchNum, snapshot_step=2000, snapshot_epoch=False, run_id='inception_resnet_v2_oxflowers17')
    if modelStore: model.save(_id + '-model.tflearn')
def ImageNetInceptionV2(outnode, model_name, target, opt, learn_r, epch, dropout_keep_rate, save_model=False):
    """Build an Inception-ResNet-v2 classifier for single-channel images.

    Args:
        outnode: number of output classes (size of the final softmax layer).
        model_name, target: strings folded into the checkpoint/log identifier.
        opt: optimizer name passed to tflearn.regression (e.g. 'adam').
        learn_r: learning rate.
        epch: epoch count — only used in the identifier string here.
        dropout_keep_rate: keep probability for the pre-classifier dropout
            (original default was 0.8).
        save_model: when True, wire up checkpoint/best-checkpoint/tensorboard
            paths; otherwise build a bare DNN.

    Returns:
        A tflearn.DNN wrapping the compiled graph (not yet trained).

    NOTE(review): input size comes from module-level IMG_SIZE, not a parameter.
    """
    def block35(net, scale=1.0, activation="relu"):
        # Inception-ResNet-A: three towers concatenated, projected to the
        # input depth with a 1x1 conv, then added to `net` scaled by `scale`.
        tower_conv = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 32, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 48, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 64, 3, bias=False, activation=None, name='Conv2d_0c_3x3')))
        tower_mixed = merge([tower_conv, tower_conv1_1, tower_conv2_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block17(net, scale=1.0, activation="relu"):
        # Inception-ResNet-B: 1x1 tower plus a factorized 1x7 -> 7x1 tower.
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv_1_0 = relu(batch_normalization(conv_2d(net, 128, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv_1_1 = relu(batch_normalization(conv_2d(tower_conv_1_0, 160, [1, 7], bias=False, activation=None, name='Conv2d_0b_1x7')))
        tower_conv_1_2 = relu(batch_normalization(conv_2d(tower_conv_1_1, 192, [7, 1], bias=False, activation=None, name='Conv2d_0c_7x1')))
        tower_mixed = merge([tower_conv, tower_conv_1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block8(net, scale=1.0, activation="relu"):
        # Inception-ResNet-C: 1x1 tower plus a factorized 1x3 -> 3x1 tower.
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 224, [1, 3], bias=False, activation=None, name='Conv2d_0b_1x3')))
        # NOTE(review): unlike the sibling convs this one omits activation=None
        # (falls back to the conv_2d default) — confirm intentional.
        tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 256, [3, 1], bias=False, name='Conv2d_0c_3x1')))
        tower_mixed = merge([tower_conv, tower_conv1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    # default = 0.8
    dropout_keep_prob = dropout_keep_rate
    # Single-channel input; IMG_SIZE is read from module scope.
    network = input_data(shape=[None, IMG_SIZE, IMG_SIZE, 1], name='input')
    # Stem.
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID', activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID', activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID', activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')
    # Mixed_5b: four parallel branches concatenated on the channel axis.
    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))
    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0c_3x3')))
    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None, name='Conv2d_5b_b3_0b_1x1')))
    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)
    # 10x Inception-ResNet-A.
    net = repeat(tower_5b_out, 10, block35, scale=0.17)
    # Reduction-A: strided conv / conv chain / max-pool, concatenated.
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    # 20x Inception-ResNet-B.
    net = repeat(net, 20, block17, scale=0.1)
    # Reduction-B: three conv branches plus a max-pool branch.
    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    # 'COnv2d' typo is a saved layer name — left untouched to keep checkpoints loadable.
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 3, bias=False, strides=2, padding='VALID', activation=None, name='COnv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 3, bias=False, name='Conv2d_0b_3x3', activation=None)))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool], mode='concat', axis=3)
    # 9x Inception-ResNet-C, then one final block without residual activation.
    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)
    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    # Global average pool: kernel sized to whatever spatial extent remains.
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, outnode, activation='softmax')
    # Identifier used for checkpoint / best-checkpoint / tensorboard paths.
    str_model_name = "{}_{}_{}_{}_{}_{}".format(model_name, target, opt, learn_r, epch, dropout_keep_rate)
    network = tflearn.regression(loss, optimizer=opt, loss='categorical_crossentropy', learning_rate=learn_r, name='targets')
    model = None
    if save_model:
        model = tflearn.DNN(
            network,
            checkpoint_path='../tflearnModels/{}'.format(str_model_name),
            best_checkpoint_path='../tflearnModels/bestModels/best_{}'.format(
                str_model_name),
            max_checkpoints=1,
            tensorboard_verbose=0,
            tensorboard_dir="../tflearnLogs/{}/".format(str_model_name))
    else:
        model = tflearn.DNN(network)
    return model
def __init__(self, img_size, label_size, channels):
    """Build a GoogLeNet (Inception v1) graph and wrap it in a tflearn.DNN.

    Args:
        img_size: input height/width in pixels (square input assumed).
        label_size: number of output classes for the softmax classifier.
        channels: number of input channels.

    Side effects: stores every layer as an attribute on self; the compiled
    estimator ends up in self.model.
    """
    # Image input for training
    self.img_size = img_size
    # Label input for training
    self.label_size = label_size
    # Channels input for training
    self.channels = channels
    # GoogleNet algorithm
    self.network = input_data(shape=[None, img_size, img_size, channels], name='data')
    # Stem: 7x7/2 conv -> pool -> LRN -> 1x1 reduce -> 3x3 conv -> LRN -> pool.
    self.conv1_7x7_s2 = conv_2d(self.network, 64, 7, strides=2, activation='relu', name='conv1_7x7_s2')
    self.pool1_3x3_s2 = max_pool_2d(self.conv1_7x7_s2, 3, strides=2, name='pool1_3x3_s2')
    # Special case: LRN result overwrites the pool attribute.
    self.pool1_3x3_s2 = local_response_normalization(self.pool1_3x3_s2, name='pool1_norm1')
    self.conv2_3x3_reduce = conv_2d(self.pool1_3x3_s2, 64, 1, activation='relu', name='conv2_3x3_reduce')
    self.conv2_3x3 = conv_2d(self.conv2_3x3_reduce, 192, 3, activation='relu', name='conv2_3x3')
    # Special case: LRN result overwrites the conv attribute.
    self.conv2_3x3 = local_response_normalization(self.conv2_3x3, name='conv2_norm2')
    self.pool2_3x3_s2 = max_pool_2d(self.conv2_3x3, kernel_size=3, strides=2, name='pool2_3x3_s2')
    # Inception 3a: 1x1 / 1x1->3x3 / 1x1->5x5 / pool->1x1 branches.
    self.inception_3a_1x1 = conv_2d(self.pool2_3x3_s2, 64, 1, activation='relu', name='inception_3a_1x1')
    self.inception_3a_3x3_reduce = conv_2d(self.pool2_3x3_s2, 96, 1, activation='relu', name='inception_3a_3x3_reduce')
    self.inception_3a_3x3 = conv_2d(self.inception_3a_3x3_reduce, 128, filter_size=3, activation='relu', name='inception_3a_3x3')
    self.inception_3a_5x5_reduce = conv_2d(self.pool2_3x3_s2, 16, filter_size=1, activation='relu', name='inception_3a_5x5_reduce')
    self.inception_3a_5x5 = conv_2d(self.inception_3a_5x5_reduce, 32, filter_size=5, activation='relu', name='inception_3a_5x5')
    self.inception_3a_pool = max_pool_2d(self.pool2_3x3_s2, kernel_size=3, strides=1, name='inception_3a_pool')
    self.inception_3a_pool_proj = conv_2d(self.inception_3a_pool, 32, filter_size=1, activation='relu', name='inception_3a_pool_proj')
    # Merge the inception 3a
    self.inception_3a_output = merge([
        self.inception_3a_1x1, self.inception_3a_3x3,
        self.inception_3a_5x5, self.inception_3a_pool_proj
    ], mode='concat', axis=3, name='inception_3a_output')
    # Inception 3b.
    self.inception_3b_1x1 = conv_2d(self.inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_1x1')
    self.inception_3b_3x3_reduce = conv_2d(self.inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_3x3_reduce')
    self.inception_3b_3x3 = conv_2d(self.inception_3b_3x3_reduce, 192, filter_size=3, activation='relu', name='inception_3b_3x3')
    self.inception_3b_5x5_reduce = conv_2d(self.inception_3a_output, 32, filter_size=1, activation='relu', name='inception_3b_5x5_reduce')
    # FIX: activation='relu' was missing here (tflearn conv_2d defaults to a
    # linear activation), unlike every other inception conv in this method.
    self.inception_3b_5x5 = conv_2d(self.inception_3b_5x5_reduce, 96, filter_size=5, activation='relu', name='inception_3b_5x5')
    self.inception_3b_pool = max_pool_2d(self.inception_3a_output, kernel_size=3, strides=1, name='inception_3b_pool')
    self.inception_3b_pool_proj = conv_2d(self.inception_3b_pool, 64, filter_size=1, activation='relu', name='inception_3b_pool_proj')
    # Merge the inception 3b
    self.inception_3b_output = merge([
        self.inception_3b_1x1, self.inception_3b_3x3,
        self.inception_3b_5x5, self.inception_3b_pool_proj
    ], mode='concat', axis=3, name='inception_3b_output')
    self.pool3_3x3_s2 = max_pool_2d(self.inception_3b_output, kernel_size=3, strides=2, name='pool3_3x3_s2')
    # Inception 4a.
    self.inception_4a_1x1 = conv_2d(self.pool3_3x3_s2, 192, filter_size=1, activation='relu', name='inception_4a_1x1')
    self.inception_4a_3x3_reduce = conv_2d(self.pool3_3x3_s2, 96, filter_size=1, activation='relu', name='inception_4a_3x3_reduce')
    self.inception_4a_3x3 = conv_2d(self.inception_4a_3x3_reduce, 208, filter_size=3, activation='relu', name='inception_4a_3x3')
    self.inception_4a_5x5_reduce = conv_2d(self.pool3_3x3_s2, 16, filter_size=1, activation='relu', name='inception_4a_5x5_reduce')
    self.inception_4a_5x5 = conv_2d(self.inception_4a_5x5_reduce, 48, filter_size=5, activation='relu', name='inception_4a_5x5')
    self.inception_4a_pool = max_pool_2d(self.pool3_3x3_s2, kernel_size=3, strides=1, name='inception_4a_pool')
    self.inception_4a_pool_proj = conv_2d(self.inception_4a_pool, 64, filter_size=1, activation='relu', name='inception_4a_pool_proj')
    self.inception_4a_output = merge([
        self.inception_4a_1x1, self.inception_4a_3x3,
        self.inception_4a_5x5, self.inception_4a_pool_proj
    ], mode='concat', axis=3, name='inception_4a_output')
    # Inception 4b.
    self.inception_4b_1x1 = conv_2d(self.inception_4a_output, 160, filter_size=1, activation='relu', name='inception_4b_1x1')
    self.inception_4b_3x3_reduce = conv_2d(self.inception_4a_output, 112, filter_size=1, activation='relu', name='inception_4b_3x3_reduce')
    self.inception_4b_3x3 = conv_2d(self.inception_4b_3x3_reduce, 224, filter_size=3, activation='relu', name='inception_4b_3x3')
    self.inception_4b_5x5_reduce = conv_2d(self.inception_4a_output, 24, filter_size=1, activation='relu', name='inception_4b_5x5_reduce')
    self.inception_4b_5x5 = conv_2d(self.inception_4b_5x5_reduce, 64, filter_size=5, activation='relu', name='inception_4b_5x5')
    self.inception_4b_pool = max_pool_2d(self.inception_4a_output, kernel_size=3, strides=1, name='inception_4b_pool')
    self.inception_4b_pool_proj = conv_2d(self.inception_4b_pool, 64, filter_size=1, activation='relu', name='inception_4b_pool_proj')
    self.inception_4b_output = merge([
        self.inception_4b_1x1, self.inception_4b_3x3,
        self.inception_4b_5x5, self.inception_4b_pool_proj
    ], mode='concat', axis=3, name='inception_4b_output')
    # Inception 4c.
    self.inception_4c_1x1 = conv_2d(self.inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_1x1')
    self.inception_4c_3x3_reduce = conv_2d(self.inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_3x3_reduce')
    self.inception_4c_3x3 = conv_2d(self.inception_4c_3x3_reduce, 256, filter_size=3, activation='relu', name='inception_4c_3x3')
    self.inception_4c_5x5_reduce = conv_2d(self.inception_4b_output, 24, filter_size=1, activation='relu', name='inception_4c_5x5_reduce')
    self.inception_4c_5x5 = conv_2d(self.inception_4c_5x5_reduce, 64, filter_size=5, activation='relu', name='inception_4c_5x5')
    self.inception_4c_pool = max_pool_2d(self.inception_4b_output, kernel_size=3, strides=1, name='inception_4c_pool')
    self.inception_4c_pool_proj = conv_2d(self.inception_4c_pool, 64, filter_size=1, activation='relu', name='inception_4c_pool_proj')
    self.inception_4c_output = merge([
        self.inception_4c_1x1, self.inception_4c_3x3,
        self.inception_4c_5x5, self.inception_4c_pool_proj
    ], mode='concat', axis=3, name='inception_4c_output')
    # Inception 4d.
    self.inception_4d_1x1 = conv_2d(self.inception_4c_output, 112, filter_size=1, activation='relu', name='inception_4d_1x1')
    self.inception_4d_3x3_reduce = conv_2d(self.inception_4c_output, 144, filter_size=1, activation='relu', name='inception_4d_3x3_reduce')
    self.inception_4d_3x3 = conv_2d(self.inception_4d_3x3_reduce, 288, filter_size=3, activation='relu', name='inception_4d_3x3')
    self.inception_4d_5x5_reduce = conv_2d(self.inception_4c_output, 32, filter_size=1, activation='relu', name='inception_4d_5x5_reduce')
    self.inception_4d_5x5 = conv_2d(self.inception_4d_5x5_reduce, 64, filter_size=5, activation='relu', name='inception_4d_5x5')
    self.inception_4d_pool = max_pool_2d(self.inception_4c_output, kernel_size=3, strides=1, name='inception_4d_pool')
    self.inception_4d_pool_proj = conv_2d(self.inception_4d_pool, 64, filter_size=1, activation='relu', name='inception_4d_pool_proj')
    self.inception_4d_output = merge([
        self.inception_4d_1x1, self.inception_4d_3x3,
        self.inception_4d_5x5, self.inception_4d_pool_proj
    ], mode='concat', axis=3, name='inception_4d_output')
    # Inception 4e.
    self.inception_4e_1x1 = conv_2d(self.inception_4d_output, 256, filter_size=1, activation='relu', name='inception_4e_1x1')
    self.inception_4e_3x3_reduce = conv_2d(self.inception_4d_output, 160, filter_size=1, activation='relu', name='inception_4e_3x3_reduce')
    self.inception_4e_3x3 = conv_2d(self.inception_4e_3x3_reduce, 320, filter_size=3, activation='relu', name='inception_4e_3x3')
    self.inception_4e_5x5_reduce = conv_2d(self.inception_4d_output, 32, filter_size=1, activation='relu', name='inception_4e_5x5_reduce')
    self.inception_4e_5x5 = conv_2d(self.inception_4e_5x5_reduce, 128, filter_size=5, activation='relu', name='inception_4e_5x5')
    self.inception_4e_pool = max_pool_2d(self.inception_4d_output, kernel_size=3, strides=1, name='inception_4e_pool')
    self.inception_4e_pool_proj = conv_2d(self.inception_4e_pool, 128, filter_size=1, activation='relu', name='inception_4e_pool_proj')
    self.inception_4e_output = merge([
        self.inception_4e_1x1, self.inception_4e_3x3,
        self.inception_4e_5x5, self.inception_4e_pool_proj
    ], axis=3, mode='concat', name='inception_4e_output')
    self.pool4_3x3_s2 = max_pool_2d(self.inception_4e_output, kernel_size=3, strides=2, name='pool4_3x3_s2')
    # Inception 5a.
    self.inception_5a_1x1 = conv_2d(self.pool4_3x3_s2, 256, filter_size=1, activation='relu', name='inception_5a_1x1')
    self.inception_5a_3x3_reduce = conv_2d(self.pool4_3x3_s2, 160, filter_size=1, activation='relu', name='inception_5a_3x3_reduce')
    self.inception_5a_3x3 = conv_2d(self.inception_5a_3x3_reduce, 320, filter_size=3, activation='relu', name='inception_5a_3x3')
    self.inception_5a_5x5_reduce = conv_2d(self.pool4_3x3_s2, 32, filter_size=1, activation='relu', name='inception_5a_5x5_reduce')
    self.inception_5a_5x5 = conv_2d(self.inception_5a_5x5_reduce, 128, filter_size=5, activation='relu', name='inception_5a_5x5')
    self.inception_5a_pool = max_pool_2d(self.pool4_3x3_s2, kernel_size=3, strides=1, name='inception_5a_pool')
    self.inception_5a_pool_proj = conv_2d(self.inception_5a_pool, 128, filter_size=1, activation='relu', name='inception_5a_pool_proj')
    self.inception_5a_output = merge([
        self.inception_5a_1x1, self.inception_5a_3x3,
        self.inception_5a_5x5, self.inception_5a_pool_proj
    ], axis=3, mode='concat', name="inception_5a_output")
    # Inception 5b.
    self.inception_5b_1x1 = conv_2d(self.inception_5a_output, 384, filter_size=1, activation='relu', name='inception_5b_1x1')
    self.inception_5b_3x3_reduce = conv_2d(self.inception_5a_output, 192, filter_size=1, activation='relu', name='inception_5b_3x3_reduce')
    self.inception_5b_3x3 = conv_2d(self.inception_5b_3x3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3x3')
    self.inception_5b_5x5_reduce = conv_2d(self.inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5x5_reduce')
    self.inception_5b_5x5 = conv_2d(self.inception_5b_5x5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5x5')
    self.inception_5b_pool = max_pool_2d(self.inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool')
    self.inception_5b_pool_proj = conv_2d(self.inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_proj')
    self.inception_5b_output = merge([
        self.inception_5b_1x1, self.inception_5b_3x3,
        self.inception_5b_5x5, self.inception_5b_pool_proj
    ], axis=3, mode='concat', name='inception_5b_output')
    # Head: 7x7 average pool, dropout, softmax classifier.
    self.pool5_7x7_s1 = avg_pool_2d(self.inception_5b_output, kernel_size=7, strides=1, name='pool5_7x7_s1')
    self.pool5_7x7_s1 = dropout(self.pool5_7x7_s1, 0.4, name="dropout_7x7_s1")
    # Output
    self.loss3_classifier = fully_connected(self.pool5_7x7_s1, label_size, activation='softmax', name='loss3_classifier')
    self.network = regression(self.loss3_classifier, optimizer='momentum', loss='categorical_crossentropy', learning_rate=0.001)
    self.model = tflearn.DNN(self.network, tensorboard_verbose=0)
def inception_v3(width, height, frame_count, lr, output=9, model_name='sentnet_color.model'):
    """Build a GoogLeNet-style (Inception v1) classifier as a tflearn DNN.

    Despite the function name, the layer layout matches the original
    GoogLeNet (Szegedy et al. 2014) inception-v1 table, not Inception v3.

    Args:
        width, height: spatial size of the RGB input images.
        frame_count: unused; kept for interface compatibility with callers.
        lr: learning rate for the momentum optimizer.
        output: number of output classes (softmax width), default 9.
        model_name: unused; kept for interface compatibility with callers.

    Returns:
        An untrained tflearn.DNN wrapping the regression graph.

    Fixes vs. previous revision (model is built fresh here, so renaming
    layers is safe — no checkpoint is loaded in this function):
      * inception_4b_1_1 was created with the duplicate name
        'inception_4a_1_1'; now named 'inception_4b_1_1'.
      * inception_3b_5_5 was missing activation='relu' (tflearn conv_2d
        defaults to 'linear'); every sibling branch uses ReLU.
      * pool4_3_3 was named 'pool_3_3'; renamed to 'pool4_3_3' to match
        the pool2/pool3/pool5 convention.
    """
    network = input_data(shape=[None, width, height, 3], name='input')

    # Stem: 7x7/2 conv -> 3x3/2 pool -> LRN -> 1x1 reduce -> 3x3 conv -> LRN -> pool
    conv1_7_7 = conv_2d(network, 64, 7, strides=2, activation='relu', name='conv1_7_7_s2')
    pool1_3_3 = max_pool_2d(conv1_7_7, 3, strides=2)
    pool1_3_3 = local_response_normalization(pool1_3_3)
    conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu', name='conv2_3_3_reduce')
    conv2_3_3 = conv_2d(conv2_3_3_reduce, 192, 3, activation='relu', name='conv2_3_3')
    conv2_3_3 = local_response_normalization(conv2_3_3)
    pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=3, strides=2, name='pool2_3_3_s2')

    # Inception 3a: four parallel branches (1x1, 1x1->3x3, 1x1->5x5, pool->1x1)
    inception_3a_1_1 = conv_2d(pool2_3_3, 64, 1, activation='relu', name='inception_3a_1_1')
    inception_3a_3_3_reduce = conv_2d(pool2_3_3, 96, 1, activation='relu', name='inception_3a_3_3_reduce')
    inception_3a_3_3 = conv_2d(inception_3a_3_3_reduce, 128, filter_size=3, activation='relu', name='inception_3a_3_3')
    inception_3a_5_5_reduce = conv_2d(pool2_3_3, 16, filter_size=1, activation='relu', name='inception_3a_5_5_reduce')
    inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce, 32, filter_size=5, activation='relu', name='inception_3a_5_5')
    inception_3a_pool = max_pool_2d(pool2_3_3, kernel_size=3, strides=1)
    inception_3a_pool_1_1 = conv_2d(inception_3a_pool, 32, filter_size=1, activation='relu', name='inception_3a_pool_1_1')
    # Concatenate branches along the channel axis (NHWC -> axis 3).
    inception_3a_output = merge([inception_3a_1_1, inception_3a_3_3, inception_3a_5_5, inception_3a_pool_1_1],
                                mode='concat', axis=3)

    # Inception 3b
    inception_3b_1_1 = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_1_1')
    inception_3b_3_3_reduce = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_3_3_reduce')
    inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce, 192, filter_size=3, activation='relu', name='inception_3b_3_3')
    inception_3b_5_5_reduce = conv_2d(inception_3a_output, 32, filter_size=1, activation='relu', name='inception_3b_5_5_reduce')
    # Fixed: activation='relu' was missing (defaulted to linear), unlike every other branch.
    inception_3b_5_5 = conv_2d(inception_3b_5_5_reduce, 96, filter_size=5, activation='relu', name='inception_3b_5_5')
    inception_3b_pool = max_pool_2d(inception_3a_output, kernel_size=3, strides=1, name='inception_3b_pool')
    inception_3b_pool_1_1 = conv_2d(inception_3b_pool, 64, filter_size=1, activation='relu', name='inception_3b_pool_1_1')
    inception_3b_output = merge([inception_3b_1_1, inception_3b_3_3, inception_3b_5_5, inception_3b_pool_1_1],
                                mode='concat', axis=3, name='inception_3b_output')

    pool3_3_3 = max_pool_2d(inception_3b_output, kernel_size=3, strides=2, name='pool3_3_3')

    # Inception 4a
    inception_4a_1_1 = conv_2d(pool3_3_3, 192, filter_size=1, activation='relu', name='inception_4a_1_1')
    inception_4a_3_3_reduce = conv_2d(pool3_3_3, 96, filter_size=1, activation='relu', name='inception_4a_3_3_reduce')
    inception_4a_3_3 = conv_2d(inception_4a_3_3_reduce, 208, filter_size=3, activation='relu', name='inception_4a_3_3')
    inception_4a_5_5_reduce = conv_2d(pool3_3_3, 16, filter_size=1, activation='relu', name='inception_4a_5_5_reduce')
    inception_4a_5_5 = conv_2d(inception_4a_5_5_reduce, 48, filter_size=5, activation='relu', name='inception_4a_5_5')
    inception_4a_pool = max_pool_2d(pool3_3_3, kernel_size=3, strides=1, name='inception_4a_pool')
    inception_4a_pool_1_1 = conv_2d(inception_4a_pool, 64, filter_size=1, activation='relu', name='inception_4a_pool_1_1')
    inception_4a_output = merge([inception_4a_1_1, inception_4a_3_3, inception_4a_5_5, inception_4a_pool_1_1],
                                mode='concat', axis=3, name='inception_4a_output')

    # Inception 4b
    # Fixed: this layer was mis-named 'inception_4a_1_1', duplicating the 4a branch name.
    inception_4b_1_1 = conv_2d(inception_4a_output, 160, filter_size=1, activation='relu', name='inception_4b_1_1')
    inception_4b_3_3_reduce = conv_2d(inception_4a_output, 112, filter_size=1, activation='relu', name='inception_4b_3_3_reduce')
    inception_4b_3_3 = conv_2d(inception_4b_3_3_reduce, 224, filter_size=3, activation='relu', name='inception_4b_3_3')
    inception_4b_5_5_reduce = conv_2d(inception_4a_output, 24, filter_size=1, activation='relu', name='inception_4b_5_5_reduce')
    inception_4b_5_5 = conv_2d(inception_4b_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4b_5_5')
    inception_4b_pool = max_pool_2d(inception_4a_output, kernel_size=3, strides=1, name='inception_4b_pool')
    inception_4b_pool_1_1 = conv_2d(inception_4b_pool, 64, filter_size=1, activation='relu', name='inception_4b_pool_1_1')
    inception_4b_output = merge([inception_4b_1_1, inception_4b_3_3, inception_4b_5_5, inception_4b_pool_1_1],
                                mode='concat', axis=3, name='inception_4b_output')

    # Inception 4c
    inception_4c_1_1 = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_1_1')
    inception_4c_3_3_reduce = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_3_3_reduce')
    inception_4c_3_3 = conv_2d(inception_4c_3_3_reduce, 256, filter_size=3, activation='relu', name='inception_4c_3_3')
    inception_4c_5_5_reduce = conv_2d(inception_4b_output, 24, filter_size=1, activation='relu', name='inception_4c_5_5_reduce')
    inception_4c_5_5 = conv_2d(inception_4c_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4c_5_5')
    inception_4c_pool = max_pool_2d(inception_4b_output, kernel_size=3, strides=1)
    inception_4c_pool_1_1 = conv_2d(inception_4c_pool, 64, filter_size=1, activation='relu', name='inception_4c_pool_1_1')
    inception_4c_output = merge([inception_4c_1_1, inception_4c_3_3, inception_4c_5_5, inception_4c_pool_1_1],
                                mode='concat', axis=3, name='inception_4c_output')

    # Inception 4d
    inception_4d_1_1 = conv_2d(inception_4c_output, 112, filter_size=1, activation='relu', name='inception_4d_1_1')
    inception_4d_3_3_reduce = conv_2d(inception_4c_output, 144, filter_size=1, activation='relu', name='inception_4d_3_3_reduce')
    inception_4d_3_3 = conv_2d(inception_4d_3_3_reduce, 288, filter_size=3, activation='relu', name='inception_4d_3_3')
    inception_4d_5_5_reduce = conv_2d(inception_4c_output, 32, filter_size=1, activation='relu', name='inception_4d_5_5_reduce')
    inception_4d_5_5 = conv_2d(inception_4d_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4d_5_5')
    inception_4d_pool = max_pool_2d(inception_4c_output, kernel_size=3, strides=1, name='inception_4d_pool')
    inception_4d_pool_1_1 = conv_2d(inception_4d_pool, 64, filter_size=1, activation='relu', name='inception_4d_pool_1_1')
    inception_4d_output = merge([inception_4d_1_1, inception_4d_3_3, inception_4d_5_5, inception_4d_pool_1_1],
                                mode='concat', axis=3, name='inception_4d_output')

    # Inception 4e
    inception_4e_1_1 = conv_2d(inception_4d_output, 256, filter_size=1, activation='relu', name='inception_4e_1_1')
    inception_4e_3_3_reduce = conv_2d(inception_4d_output, 160, filter_size=1, activation='relu', name='inception_4e_3_3_reduce')
    inception_4e_3_3 = conv_2d(inception_4e_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_4e_3_3')
    inception_4e_5_5_reduce = conv_2d(inception_4d_output, 32, filter_size=1, activation='relu', name='inception_4e_5_5_reduce')
    inception_4e_5_5 = conv_2d(inception_4e_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_4e_5_5')
    inception_4e_pool = max_pool_2d(inception_4d_output, kernel_size=3, strides=1, name='inception_4e_pool')
    inception_4e_pool_1_1 = conv_2d(inception_4e_pool, 128, filter_size=1, activation='relu', name='inception_4e_pool_1_1')
    inception_4e_output = merge([inception_4e_1_1, inception_4e_3_3, inception_4e_5_5, inception_4e_pool_1_1],
                                axis=3, mode='concat')

    # Fixed: was named 'pool_3_3'; renamed to match pool2/pool3/pool5 convention.
    pool4_3_3 = max_pool_2d(inception_4e_output, kernel_size=3, strides=2, name='pool4_3_3')

    # Inception 5a
    inception_5a_1_1 = conv_2d(pool4_3_3, 256, filter_size=1, activation='relu', name='inception_5a_1_1')
    inception_5a_3_3_reduce = conv_2d(pool4_3_3, 160, filter_size=1, activation='relu', name='inception_5a_3_3_reduce')
    inception_5a_3_3 = conv_2d(inception_5a_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_5a_3_3')
    inception_5a_5_5_reduce = conv_2d(pool4_3_3, 32, filter_size=1, activation='relu', name='inception_5a_5_5_reduce')
    inception_5a_5_5 = conv_2d(inception_5a_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5a_5_5')
    inception_5a_pool = max_pool_2d(pool4_3_3, kernel_size=3, strides=1, name='inception_5a_pool')
    inception_5a_pool_1_1 = conv_2d(inception_5a_pool, 128, filter_size=1, activation='relu', name='inception_5a_pool_1_1')
    inception_5a_output = merge([inception_5a_1_1, inception_5a_3_3, inception_5a_5_5, inception_5a_pool_1_1],
                                axis=3, mode='concat')

    # Inception 5b
    inception_5b_1_1 = conv_2d(inception_5a_output, 384, filter_size=1, activation='relu', name='inception_5b_1_1')
    inception_5b_3_3_reduce = conv_2d(inception_5a_output, 192, filter_size=1, activation='relu', name='inception_5b_3_3_reduce')
    inception_5b_3_3 = conv_2d(inception_5b_3_3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3_3')
    inception_5b_5_5_reduce = conv_2d(inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5_5_reduce')
    inception_5b_5_5 = conv_2d(inception_5b_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5_5')
    inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool')
    inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1')
    inception_5b_output = merge([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1],
                                axis=3, mode='concat')

    # Head: global 7x7 average pool, dropout, softmax classifier.
    pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1)
    pool5_7_7 = dropout(pool5_7_7, 0.4)
    # Renamed local from 'loss' to 'probs' — it previously shadowed the
    # loss= keyword argument in the regression() call below.
    probs = fully_connected(pool5_7_7, output, activation='softmax')
    network = regression(probs, optimizer='momentum',
                         loss='categorical_crossentropy',
                         learning_rate=lr, name='targets')
    model = tflearn.DNN(network, max_checkpoints=0, tensorboard_verbose=0, tensorboard_dir='log')
    return model
# NOTE(review): orphaned top-level fragment — the tail of a GoogLeNet training
# script (tflearn's oxflowers17 example). It references names that are NOT
# defined in the visible part of this file (pool4_3_3, inception_5a_1_1,
# inception_5a_3_3, X, Y), so the head of this script appears to have been
# lost when snippets were concatenated. It will raise NameError if executed
# as-is — TODO: restore the missing stem/3x/4x layers or delete the fragment.

# Inception 5a: remaining branches (5x5 reduce/conv and pool->1x1 projection).
inception_5a_5_5_reduce = conv_2d(pool4_3_3, 32, filter_size=1, activation='relu', name='inception_5a_5_5_reduce')
inception_5a_5_5 = conv_2d(inception_5a_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5a_5_5')
inception_5a_pool = max_pool_2d(pool4_3_3, kernel_size=3, strides=1, name='inception_5a_pool')
inception_5a_pool_1_1 = conv_2d(inception_5a_pool, 128, filter_size=1, activation='relu', name='inception_5a_pool_1_1')
# Channel-wise concat of the four 5a branches (NHWC -> axis 3).
inception_5a_output = merge([inception_5a_1_1, inception_5a_3_3, inception_5a_5_5, inception_5a_pool_1_1], axis=3, mode='concat')
# 5b
inception_5b_1_1 = conv_2d(inception_5a_output, 384, filter_size=1, activation='relu', name='inception_5b_1_1')
inception_5b_3_3_reduce = conv_2d(inception_5a_output, 192, filter_size=1, activation='relu', name='inception_5b_3_3_reduce')
inception_5b_3_3 = conv_2d(inception_5b_3_3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3_3')
inception_5b_5_5_reduce = conv_2d(inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5_5_reduce')
inception_5b_5_5 = conv_2d(inception_5b_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5_5')
inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool')
inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1')
inception_5b_output = merge([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1], axis=3, mode='concat')
# Head: global average pool + dropout before the classifier.
pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1)
pool5_7_7 = dropout(pool5_7_7, 0.4)
# fc
# 17-way softmax (Oxford Flowers-17 classes).
loss = fully_connected(pool5_7_7, 17, activation='softmax')
network = regression(loss, optimizer='momentum', loss='categorical_crossentropy', learning_rate=0.001)
# to train
model = tflearn.DNN(network, checkpoint_path='model_googlenet', max_checkpoints=1, tensorboard_verbose=2)
model.fit(X, Y, n_epoch=1000, validation_set=0.1, shuffle=True, show_metric=True, batch_size=64, snapshot_step=200, snapshot_epoch=False, run_id='googlenet_oxflowers17')
def _model4():  # Taken from TFLearn examples and based on Googles Inception. DO NOT RUN!!!!
    """Rebuild the GoogLeNet (Inception v1) graph, load a trained checkpoint
    from the module-level `_path`, run prediction on `xTest`, and print a
    confusion matrix / classification report.

    Uses module-level globals: yTest (rebound), img_aug, inputSize, dim,
    _path, xTest, _CSV, _wrFile, plus helpers convert2/convert3/makeCSV/writeTest
    defined elsewhere in this file.

    NOTE(review): two architecture-level oddities are deliberately left
    untouched because this function *loads* a checkpoint — changing layer
    names or activations would make the graph incompatible with weights
    trained against this exact definition:
      * inception_4b_1_1 is created with name='inception_4a_1_1' (duplicate
        of the 4a branch name).
      * inception_3b_5_5 has no activation= argument (tflearn defaults to
        'linear'), unlike every sibling 5x5 branch which uses 'relu'.
    """
    global yTest, img_aug
    # Clear any previously-built graph (TF 1.x global-graph API).
    tf.reset_default_graph()
    # Featurewise zero-center + stdnorm preprocessing, as in the TFLearn example.
    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()
    network = input_data(shape=[None, inputSize, inputSize, dim], name='input', data_preprocessing=img_prep, data_augmentation=img_aug)
    # Stem
    conv1_7_7 = conv_2d(network, 64, 7, strides=2, activation='relu', name='conv1_7_7_s2')
    pool1_3_3 = max_pool_2d(conv1_7_7, 3, strides=2)
    pool1_3_3 = local_response_normalization(pool1_3_3)
    conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu', name='conv2_3_3_reduce')
    conv2_3_3 = conv_2d(conv2_3_3_reduce, 192, 3, activation='relu', name='conv2_3_3')
    conv2_3_3 = local_response_normalization(conv2_3_3)
    pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=3, strides=2, name='pool2_3_3_s2')
    # 3a
    inception_3a_1_1 = conv_2d(pool2_3_3, 64, 1, activation='relu', name='inception_3a_1_1')
    inception_3a_3_3_reduce = conv_2d(pool2_3_3, 96, 1, activation='relu', name='inception_3a_3_3_reduce')
    inception_3a_3_3 = conv_2d(inception_3a_3_3_reduce, 128, filter_size=3, activation='relu', name='inception_3a_3_3')
    inception_3a_5_5_reduce = conv_2d(pool2_3_3, 16, filter_size=1, activation='relu', name='inception_3a_5_5_reduce')
    inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce, 32, filter_size=5, activation='relu', name='inception_3a_5_5')
    inception_3a_pool = max_pool_2d(pool2_3_3, kernel_size=3, strides=1, name='inception_3a_pool')
    inception_3a_pool_1_1 = conv_2d(inception_3a_pool, 32, filter_size=1, activation='relu', name='inception_3a_pool_1_1')
    # Channel-wise concat of the four branches (NHWC -> axis 3).
    inception_3a_output = merge([inception_3a_1_1, inception_3a_3_3, inception_3a_5_5, inception_3a_pool_1_1], mode='concat', axis=3)
    # 3b
    inception_3b_1_1 = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_1_1')
    inception_3b_3_3_reduce = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_3_3_reduce')
    inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce, 192, filter_size=3, activation='relu', name='inception_3b_3_3')
    inception_3b_5_5_reduce = conv_2d(inception_3a_output, 32, filter_size=1, activation='relu', name='inception_3b_5_5_reduce')
    # NOTE(review): no activation= here -> tflearn default 'linear'; see docstring.
    inception_3b_5_5 = conv_2d(inception_3b_5_5_reduce, 96, filter_size=5, name='inception_3b_5_5')
    inception_3b_pool = max_pool_2d(inception_3a_output, kernel_size=3, strides=1, name='inception_3b_pool')
    inception_3b_pool_1_1 = conv_2d(inception_3b_pool, 64, filter_size=1, activation='relu', name='inception_3b_pool_1_1')
    inception_3b_output = merge([inception_3b_1_1, inception_3b_3_3, inception_3b_5_5, inception_3b_pool_1_1], mode='concat', axis=3, name='inception_3b_output')
    pool3_3_3 = max_pool_2d(inception_3b_output, kernel_size=3, strides=2, name='pool3_3_3')
    # 4a
    inception_4a_1_1 = conv_2d(pool3_3_3, 192, filter_size=1, activation='relu', name='inception_4a_1_1')
    inception_4a_3_3_reduce = conv_2d(pool3_3_3, 96, filter_size=1, activation='relu', name='inception_4a_3_3_reduce')
    inception_4a_3_3 = conv_2d(inception_4a_3_3_reduce, 208, filter_size=3, activation='relu', name='inception_4a_3_3')
    inception_4a_5_5_reduce = conv_2d(pool3_3_3, 16, filter_size=1, activation='relu', name='inception_4a_5_5_reduce')
    inception_4a_5_5 = conv_2d(inception_4a_5_5_reduce, 48, filter_size=5, activation='relu', name='inception_4a_5_5')
    inception_4a_pool = max_pool_2d(pool3_3_3, kernel_size=3, strides=1, name='inception_4a_pool')
    inception_4a_pool_1_1 = conv_2d(inception_4a_pool, 64, filter_size=1, activation='relu', name='inception_4a_pool_1_1')
    inception_4a_output = merge([inception_4a_1_1, inception_4a_3_3, inception_4a_5_5, inception_4a_pool_1_1], mode='concat', axis=3, name='inception_4a_output')
    # 4b
    # NOTE(review): name below duplicates 'inception_4a_1_1'; see docstring.
    inception_4b_1_1 = conv_2d(inception_4a_output, 160, filter_size=1, activation='relu', name='inception_4a_1_1')
    inception_4b_3_3_reduce = conv_2d(inception_4a_output, 112, filter_size=1, activation='relu', name='inception_4b_3_3_reduce')
    inception_4b_3_3 = conv_2d(inception_4b_3_3_reduce, 224, filter_size=3, activation='relu', name='inception_4b_3_3')
    inception_4b_5_5_reduce = conv_2d(inception_4a_output, 24, filter_size=1, activation='relu', name='inception_4b_5_5_reduce')
    inception_4b_5_5 = conv_2d(inception_4b_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4b_5_5')
    inception_4b_pool = max_pool_2d(inception_4a_output, kernel_size=3, strides=1, name='inception_4b_pool')
    inception_4b_pool_1_1 = conv_2d(inception_4b_pool, 64, filter_size=1, activation='relu', name='inception_4b_pool_1_1')
    inception_4b_output = merge([inception_4b_1_1, inception_4b_3_3, inception_4b_5_5, inception_4b_pool_1_1], mode='concat', axis=3, name='inception_4b_output')
    # 4c
    inception_4c_1_1 = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_1_1')
    inception_4c_3_3_reduce = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_3_3_reduce')
    inception_4c_3_3 = conv_2d(inception_4c_3_3_reduce, 256, filter_size=3, activation='relu', name='inception_4c_3_3')
    inception_4c_5_5_reduce = conv_2d(inception_4b_output, 24, filter_size=1, activation='relu', name='inception_4c_5_5_reduce')
    inception_4c_5_5 = conv_2d(inception_4c_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4c_5_5')
    inception_4c_pool = max_pool_2d(inception_4b_output, kernel_size=3, strides=1)
    inception_4c_pool_1_1 = conv_2d(inception_4c_pool, 64, filter_size=1, activation='relu', name='inception_4c_pool_1_1')
    inception_4c_output = merge([inception_4c_1_1, inception_4c_3_3, inception_4c_5_5, inception_4c_pool_1_1], mode='concat', axis=3, name='inception_4c_output')
    # 4d
    inception_4d_1_1 = conv_2d(inception_4c_output, 112, filter_size=1, activation='relu', name='inception_4d_1_1')
    inception_4d_3_3_reduce = conv_2d(inception_4c_output, 144, filter_size=1, activation='relu', name='inception_4d_3_3_reduce')
    inception_4d_3_3 = conv_2d(inception_4d_3_3_reduce, 288, filter_size=3, activation='relu', name='inception_4d_3_3')
    inception_4d_5_5_reduce = conv_2d(inception_4c_output, 32, filter_size=1, activation='relu', name='inception_4d_5_5_reduce')
    inception_4d_5_5 = conv_2d(inception_4d_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4d_5_5')
    inception_4d_pool = max_pool_2d(inception_4c_output, kernel_size=3, strides=1, name='inception_4d_pool')
    inception_4d_pool_1_1 = conv_2d(inception_4d_pool, 64, filter_size=1, activation='relu', name='inception_4d_pool_1_1')
    inception_4d_output = merge([inception_4d_1_1, inception_4d_3_3, inception_4d_5_5, inception_4d_pool_1_1], mode='concat', axis=3, name='inception_4d_output')
    # 4e
    inception_4e_1_1 = conv_2d(inception_4d_output, 256, filter_size=1, activation='relu', name='inception_4e_1_1')
    inception_4e_3_3_reduce = conv_2d(inception_4d_output, 160, filter_size=1, activation='relu', name='inception_4e_3_3_reduce')
    inception_4e_3_3 = conv_2d(inception_4e_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_4e_3_3')
    inception_4e_5_5_reduce = conv_2d(inception_4d_output, 32, filter_size=1, activation='relu', name='inception_4e_5_5_reduce')
    inception_4e_5_5 = conv_2d(inception_4e_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_4e_5_5')
    inception_4e_pool = max_pool_2d(inception_4d_output, kernel_size=3, strides=1, name='inception_4e_pool')
    inception_4e_pool_1_1 = conv_2d(inception_4e_pool, 128, filter_size=1, activation='relu', name='inception_4e_pool_1_1')
    inception_4e_output = merge([inception_4e_1_1, inception_4e_3_3, inception_4e_5_5, inception_4e_pool_1_1], axis=3, mode='concat')
    pool4_3_3 = max_pool_2d(inception_4e_output, kernel_size=3, strides=2, name='pool_3_3')
    # 5a
    inception_5a_1_1 = conv_2d(pool4_3_3, 256, filter_size=1, activation='relu', name='inception_5a_1_1')
    inception_5a_3_3_reduce = conv_2d(pool4_3_3, 160, filter_size=1, activation='relu', name='inception_5a_3_3_reduce')
    inception_5a_3_3 = conv_2d(inception_5a_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_5a_3_3')
    inception_5a_5_5_reduce = conv_2d(pool4_3_3, 32, filter_size=1, activation='relu', name='inception_5a_5_5_reduce')
    inception_5a_5_5 = conv_2d(inception_5a_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5a_5_5')
    inception_5a_pool = max_pool_2d(pool4_3_3, kernel_size=3, strides=1, name='inception_5a_pool')
    inception_5a_pool_1_1 = conv_2d(inception_5a_pool, 128, filter_size=1, activation='relu', name='inception_5a_pool_1_1')
    inception_5a_output = merge([inception_5a_1_1, inception_5a_3_3, inception_5a_5_5, inception_5a_pool_1_1], axis=3, mode='concat')
    # 5b
    inception_5b_1_1 = conv_2d(inception_5a_output, 384, filter_size=1, activation='relu', name='inception_5b_1_1')
    inception_5b_3_3_reduce = conv_2d(inception_5a_output, 192, filter_size=1, activation='relu', name='inception_5b_3_3_reduce')
    inception_5b_3_3 = conv_2d(inception_5b_3_3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3_3')
    inception_5b_5_5_reduce = conv_2d(inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5_5_reduce')
    inception_5b_5_5 = conv_2d(inception_5b_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5_5')
    inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool')
    inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1')
    inception_5b_output = merge([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1], axis=3, mode='concat')
    # Head: 7x7 average pool + dropout + softmax sized from the test labels.
    pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1)
    pool5_7_7 = dropout(pool5_7_7, 0.4)
    # fc
    loss = fully_connected(pool5_7_7, len(yTest[0]), activation='softmax')
    network = regression(loss, optimizer='momentum', loss='categorical_crossentropy', learning_rate=0.001)
    # to train
    model = tflearn.DNN(network, checkpoint_path='model_googlenet', max_checkpoints=1, tensorboard_verbose=2)
    # Load previously-trained weights; the graph above must match them exactly.
    model.load(_path)
    pred = model.predict(xTest)
    # Dump raw prediction scores next to the checkpoint.
    df = pd.DataFrame(pred)
    df.to_csv(_path + ".csv")
    # assumes model.predict returns an object with .copy() (list/ndarray) — TODO confirm
    newList = pred.copy()
    newList = convert2(newList)
    if _CSV:
        makeCSV(newList)
    # convert2/convert3 presumably map scores -> class labels; verify against their definitions.
    pred = convert2(pred)
    pred = convert3(pred)
    yTest = convert3(yTest)
    print(metrics.confusion_matrix(yTest, pred))
    print(metrics.classification_report(yTest, pred))
    print('Accuracy', accuracy_score(yTest, pred))
    print()
    if _wrFile:
        writeTest(pred)
# NOTE(review): orphaned top-level fragment — a partial copy of the
# Inception-ResNet graph built inside _model5 below. It references names not
# defined at this scope (conv2a_3_3, block35, block17, relu,
# batch_normalization, repeat), so it will raise NameError if executed as-is.
# TODO: delete this duplicate or restore the missing head of its script.

# Stem tail: BN + ReLU around plain (bias-free, linear) convs, then VALID pools.
conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID', activation=None, name='Conv2d_3b_1x1')))
conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID', activation=None, name='Conv2d_4a_3x3')))
maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')
# Mixed-5b block: four parallel towers concatenated on the channel axis.
tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))
tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))
tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0c_3x3')))
tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None, name='Conv2d_5b_b3_0b_1x1')))
tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)
# 10 residual block35 units, then the 6a reduction towers.
net = repeat(tower_5b_out, 10, block35, scale=0.17)
tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3')
net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
net = repeat(net, 20, block17, scale=0.1)
tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
def _model5():
    """Rebuild an Inception-ResNet-v2-style graph, load trained weights from
    the module-level `_path`, predict on `xTest`, and print evaluation metrics.

    Uses module-level globals: yTest (rebound), img_aug, inputSize, dim,
    _path, xTest, _CSV, _wrFile, plus helpers convert2/convert3/makeCSV/writeTest.

    NOTE(review): several oddities are left as-is because this function loads
    a checkpoint trained against this exact graph:
      * Layer names are heavily reused across calls ('Conv2d_1x1',
        'Conv2d_0a_1x1', the typo 'COnv2d_1a_3x3').
      * The reduction blocks after the triple-quoted dead-code string use
        1x1 kernels and 1x1 pools where the canonical architecture (and the
        dead code) uses 3x3 — presumably an intentional downsizing; confirm
        against the checkpoint before "fixing".
    """
    global yTest, img_aug
    # Clear any previously-built graph (TF 1.x global-graph API).
    tf.reset_default_graph()
    img_prep = ImagePreprocessing()
    img_prep.add_featurewise_zero_center()
    img_prep.add_featurewise_stdnorm()

    def block35(net, scale=1.0, activation="relu"):
        # Inception-ResNet-A residual unit: three towers, 1x1 projection,
        # scaled residual add, then optional activation.
        tower_conv = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 32, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_0 = relu(batch_normalization(conv_2d(net, 32, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 48, 3, bias=False, activation=None, name='Conv2d_0b_3x3')))
        tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 64, 3, bias=False, activation=None, name='Conv2d_0c_3x3')))
        tower_mixed = merge([tower_conv, tower_conv1_1, tower_conv2_2], mode='concat', axis=3)
        # Project back to the input channel count so the residual add is valid.
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block17(net, scale=1.0, activation="relu"):
        # Inception-ResNet-B residual unit (1x7 / 7x1 factorized tower).
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv_1_0 = relu(batch_normalization(conv_2d(net, 128, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv_1_1 = relu(batch_normalization(conv_2d(tower_conv_1_0, 160, [1, 7], bias=False, activation=None, name='Conv2d_0b_1x7')))
        tower_conv_1_2 = relu(batch_normalization(conv_2d(tower_conv_1_1, 192, [7, 1], bias=False, activation=None, name='Conv2d_0c_7x1')))
        tower_mixed = merge([tower_conv, tower_conv_1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    def block8(net, scale=1.0, activation="relu"):
        # Inception-ResNet-C residual unit (1x3 / 3x1 factorized tower).
        tower_conv = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_1x1')))
        tower_conv1_0 = relu(batch_normalization(conv_2d(net, 192, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
        tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 224, [1, 3], bias=False, activation=None, name='Conv2d_0b_1x3')))
        # NOTE(review): no activation= on this conv (tflearn default 'linear'),
        # unlike its siblings which pass activation=None — verify intent.
        tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 256, [3, 1], bias=False, name='Conv2d_0c_3x1')))
        tower_mixed = merge([tower_conv, tower_conv1_2], mode='concat', axis=3)
        tower_out = relu(batch_normalization(conv_2d(tower_mixed, net.get_shape()[3], 1, bias=False, activation=None, name='Conv2d_1x1')))
        net += scale * tower_out
        if activation:
            if isinstance(activation, str):
                net = activations.get(activation)(net)
            elif hasattr(activation, '__call__'):
                net = activation(net)
            else:
                raise ValueError("Invalid Activation.")
        return net

    # Softmax width comes from the one-hot test labels; 0.8 keep-probability.
    num_classes = len(yTest[0])
    dropout_keep_prob = 0.8
    network = input_data(shape=[None, inputSize, inputSize, dim], name='input', data_preprocessing=img_prep, data_augmentation=img_aug)
    # Stem: BN + ReLU around bias-free linear convs, VALID padding.
    conv1a_3_3 = relu(batch_normalization(conv_2d(network, 32, 3, strides=2, bias=False, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    conv2a_3_3 = relu(batch_normalization(conv_2d(conv1a_3_3, 32, 3, bias=False, padding='VALID', activation=None, name='Conv2d_2a_3x3')))
    conv2b_3_3 = relu(batch_normalization(conv_2d(conv2a_3_3, 64, 3, bias=False, activation=None, name='Conv2d_2b_3x3')))
    maxpool3a_3_3 = max_pool_2d(conv2b_3_3, 3, strides=2, padding='VALID', name='MaxPool_3a_3x3')
    conv3b_1_1 = relu(batch_normalization(conv_2d(maxpool3a_3_3, 80, 1, bias=False, padding='VALID', activation=None, name='Conv2d_3b_1x1')))
    conv4a_3_3 = relu(batch_normalization(conv_2d(conv3b_1_1, 192, 3, bias=False, padding='VALID', activation=None, name='Conv2d_4a_3x3')))
    maxpool5a_3_3 = max_pool_2d(conv4a_3_3, 3, strides=2, padding='VALID', name='MaxPool_5a_3x3')
    # Mixed-5b: four parallel towers concatenated channel-wise.
    tower_conv = relu(batch_normalization(conv_2d(maxpool5a_3_3, 96, 1, bias=False, activation=None, name='Conv2d_5b_b0_1x1')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 48, 1, bias=False, activation=None, name='Conv2d_5b_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 64, 5, bias=False, activation=None, name='Conv2d_5b_b1_0b_5x5')))
    tower_conv2_0 = relu(batch_normalization(conv_2d(maxpool5a_3_3, 64, 1, bias=False, activation=None, name='Conv2d_5b_b2_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2_0, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0b_3x3')))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 96, 3, bias=False, activation=None, name='Conv2d_5b_b2_0c_3x3')))
    tower_pool3_0 = avg_pool_2d(maxpool5a_3_3, 3, strides=1, padding='same', name='AvgPool_5b_b3_0a_3x3')
    tower_conv3_1 = relu(batch_normalization(conv_2d(tower_pool3_0, 64, 1, bias=False, activation=None, name='Conv2d_5b_b3_0b_1x1')))
    tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1], mode='concat', axis=3)
    # 10 x block35 residual units.
    net = repeat(tower_5b_out, 10, block35, scale=0.17)
    # Dead code kept as a string literal (original reduction blocks with 3x3
    # kernels); preserved verbatim because it is a runtime expression.
    ''' tower_conv = relu(batch_normalization(conv_2d(net, 384, 3, bias=False, strides=2,activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3'))) tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1'))) tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 3, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3'))) tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 3, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_6a_b1_0c_3x3'))) tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID',name='MaxPool_1a_3x3') net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3) net = repeat(net, 20, block17, scale=0.1) tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1'))) tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 3, bias=False, strides=2, padding='VALID', activation=None,name='Conv2d_0a_1x1'))) tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None,name='Conv2d_0a_1x1'))) tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1,288,3, bias=False, strides=2, padding='VALID',activation=None, name='COnv2d_1a_3x3'))) tower_conv2 = relu(batch_normalization(conv_2d(net, 256,1, bias=False, activation=None,name='Conv2d_0a_1x1'))) tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288,3, bias=False, name='Conv2d_0b_3x3',activation=None))) tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 3, bias=False, strides=2, padding='VALID',activation=None, name='Conv2d_1a_3x3'))) tower_pool = max_pool_2d(net, 3, strides=2, padding='VALID', name='MaxPool_1a_3x3') '''
    # Reduction-A variant actually in use (1x1 kernels — see docstring note).
    tower_conv = relu(batch_normalization(conv_2d(net, 384, 1, bias=False, strides=2, activation=None, padding='VALID', name='Conv2d_6a_b0_0a_3x3')))
    tower_conv1_0 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1_0, 256, 1, bias=False, activation=None, name='Conv2d_6a_b1_0b_3x3')))
    tower_conv1_2 = relu(batch_normalization(conv_2d(tower_conv1_1, 384, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_6a_b1_0c_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    net = merge([tower_conv, tower_conv1_2, tower_pool], mode='concat', axis=3)
    # 20 x block17 residual units, then the second (Reduction-B-style) stage.
    net = repeat(net, 20, block17, scale=0.1)
    tower_conv = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv0_1 = relu(batch_normalization(conv_2d(tower_conv, 384, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, padding='VALID', activation=None, name='Conv2d_0a_1x1')))
    tower_conv1_1 = relu(batch_normalization(conv_2d(tower_conv1, 288, 1, bias=False, strides=2, padding='VALID', activation=None, name='COnv2d_1a_3x3')))
    tower_conv2 = relu(batch_normalization(conv_2d(net, 256, 1, bias=False, activation=None, name='Conv2d_0a_1x1')))
    tower_conv2_1 = relu(batch_normalization(conv_2d(tower_conv2, 288, 1, bias=False, name='Conv2d_0b_3x3', activation=None)))
    tower_conv2_2 = relu(batch_normalization(conv_2d(tower_conv2_1, 320, 1, bias=False, strides=2, padding='VALID', activation=None, name='Conv2d_1a_3x3')))
    tower_pool = max_pool_2d(net, 1, strides=2, padding='VALID', name='MaxPool_1a_3x3')
    ####
    net = merge([tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool], mode='concat', axis=3)
    # 9 x block8 units plus a final linear block8 (no activation).
    net = repeat(net, 9, block8, scale=0.2)
    net = block8(net, activation=None)
    net = relu(batch_normalization(conv_2d(net, 1536, 1, bias=False, activation=None, name='Conv2d_7b_1x1')))
    # Global average pool over whatever spatial size remains, then classifier.
    net = avg_pool_2d(net, net.get_shape().as_list()[1:3], strides=2, padding='VALID', name='AvgPool_1a_8x8')
    net = flatten(net)
    net = dropout(net, dropout_keep_prob)
    loss = fully_connected(net, num_classes, activation='softmax')
    network = tflearn.regression(loss, optimizer='RMSprop', loss='categorical_crossentropy', learning_rate=0.0001)
    model = tflearn.DNN(network, checkpoint_path='inception_resnet_v2', max_checkpoints=1, tensorboard_verbose=2, tensorboard_dir="./tflearn_logs/")
    # Load previously-trained weights; graph above must match them exactly.
    model.load(_path)
    pred = model.predict(xTest)
    df = pd.DataFrame(pred)
    df.to_csv(_path + ".csv")
    # assumes model.predict returns an object with .copy() (list/ndarray) — TODO confirm
    newList = pred.copy()
    newList = convert2(newList)
    if _CSV:
        makeCSV(newList)
    # convert2/convert3 presumably map scores -> class labels; verify against their definitions.
    pred = convert2(pred)
    pred = convert3(pred)
    yTest = convert3(yTest)
    print(metrics.confusion_matrix(yTest, pred))
    print(metrics.classification_report(yTest, pred))
    print('Accuracy', accuracy_score(yTest, pred))
    print()
    if _wrFile:
        writeTest(pred)
def create_Inception(num_classes):
    """Build a GoogLeNet-style (Inception v1) network for single-channel input.

    Args:
        num_classes: number of classes for the final softmax output layer.

    Returns:
        The tflearn ``regression`` layer wrapping the full network; pass it
        to ``tflearn.DNN`` to obtain a trainable model.

    Note:
        Input resolution comes from the module-level ``FLAGS.sample_size``.
        Fixes vs. the previous inline version: ``inception_4b_1_1`` was
        mistakenly created with ``name='inception_4a_1_1'`` (name collision
        with the real 4a layer), and ``inception_3b_5_5`` was missing its
        ReLU activation (tflearn defaults to linear).
    """

    def _inception(incoming, prefix, n1, n3r, n3, n5r, n5, npool):
        # One inception module: four parallel towers (1x1 / 1x1->3x3 /
        # 1x1->5x5 / 3x3-pool->1x1) concatenated on the channel axis.
        t1 = conv_2d(incoming, n1, filter_size=1, activation='relu',
                     name=prefix + '_1_1')
        r3 = conv_2d(incoming, n3r, filter_size=1, activation='relu',
                     name=prefix + '_3_3_reduce')
        t3 = conv_2d(r3, n3, filter_size=3, activation='relu',
                     name=prefix + '_3_3')
        r5 = conv_2d(incoming, n5r, filter_size=1, activation='relu',
                     name=prefix + '_5_5_reduce')
        t5 = conv_2d(r5, n5, filter_size=5, activation='relu',
                     name=prefix + '_5_5')
        pool = max_pool_2d(incoming, kernel_size=3, strides=1,
                           name=prefix + '_pool')
        pp = conv_2d(pool, npool, filter_size=1, activation='relu',
                     name=prefix + '_pool_1_1')
        return merge([t1, t3, t5, pp], mode='concat', axis=3,
                     name=prefix + '_output')

    # Stem: 7x7/2 conv -> pool -> LRN -> 1x1 reduce -> 3x3 conv -> LRN -> pool.
    network = input_data(shape=[None, FLAGS.sample_size, FLAGS.sample_size, 1])
    conv1_7_7 = conv_2d(network, 64, 7, strides=2, activation='relu',
                        name='conv1_7_7_s2')
    pool1_3_3 = max_pool_2d(conv1_7_7, 3, strides=2)
    pool1_3_3 = local_response_normalization(pool1_3_3)
    conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu',
                               name='conv2_3_3_reduce')
    conv2_3_3 = conv_2d(conv2_3_3_reduce, 192, 3, activation='relu',
                        name='conv2_3_3')
    conv2_3_3 = local_response_normalization(conv2_3_3)
    pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=3, strides=2,
                            name='pool2_3_3_s2')

    # Inception stack; filter counts per module:
    #   (1x1, 3x3reduce, 3x3, 5x5reduce, 5x5, pool-proj)
    net = _inception(pool2_3_3, 'inception_3a', 64, 96, 128, 16, 32, 32)
    net = _inception(net, 'inception_3b', 128, 128, 192, 32, 96, 64)
    net = max_pool_2d(net, kernel_size=3, strides=2, name='pool3_3_3')
    net = _inception(net, 'inception_4a', 192, 96, 208, 16, 48, 64)
    net = _inception(net, 'inception_4b', 160, 112, 224, 24, 64, 64)
    net = _inception(net, 'inception_4c', 128, 128, 256, 24, 64, 64)
    net = _inception(net, 'inception_4d', 112, 144, 288, 32, 64, 64)
    net = _inception(net, 'inception_4e', 256, 160, 320, 32, 128, 128)
    net = max_pool_2d(net, kernel_size=3, strides=2, name='pool_3_3')
    net = _inception(net, 'inception_5a', 256, 160, 320, 32, 128, 128)
    net = _inception(net, 'inception_5b', 384, 192, 384, 48, 128, 128)

    # Head: 7x7 average pool, dropout, softmax classifier.
    pool5_7_7 = avg_pool_2d(net, kernel_size=7, strides=1)
    pool5_7_7 = dropout(pool5_7_7, 0.5)
    loss = fully_connected(pool5_7_7, num_classes, activation='softmax')
    network = regression(loss, optimizer='momentum',
                         loss='categorical_crossentropy', learning_rate=0.001)
    return network
def construct_inceptionv1onfire(x, y, training=False):
    """InceptionV1-OnFire: reduced Inception-v1 (modules 3a/3b/4a only) for
    binary fire / no-fire classification, per [Dunnings/Breckon, 2018].

    Args:
        x: input image width.
        y: input image height.
        training: when True, attach the training regression layer
            (momentum optimizer, categorical cross-entropy, lr=0.001);
            otherwise the raw softmax output is used.

    Returns:
        A ``tflearn.DNN`` wrapping the network.
    """
    network = input_data(shape=[None, y, x, 3])

    # Stem: 5x5/2 conv -> pool -> LRN -> 1x1 reduce -> 3x3 conv -> LRN -> pool.
    conv1_7_7 = conv_2d(network, 64, 5, strides=2, activation='relu',
                        name='conv1_7_7_s2')
    pool1_3_3 = max_pool_2d(conv1_7_7, 3, strides=2)
    pool1_3_3 = local_response_normalization(pool1_3_3)
    conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu',
                               name='conv2_3_3_reduce')
    conv2_3_3 = conv_2d(conv2_3_3_reduce, 128, 3, activation='relu',
                        name='conv2_3_3')
    conv2_3_3 = local_response_normalization(conv2_3_3)
    pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=3, strides=2,
                            name='pool2_3_3_s2')

    # Inception module 3a.
    inception_3a_1_1 = conv_2d(pool2_3_3, 64, 1, activation='relu',
                               name='inception_3a_1_1')
    inception_3a_3_3_reduce = conv_2d(pool2_3_3, 96, 1, activation='relu',
                                      name='inception_3a_3_3_reduce')
    inception_3a_3_3 = conv_2d(inception_3a_3_3_reduce, 128, filter_size=3,
                               activation='relu', name='inception_3a_3_3')
    inception_3a_5_5_reduce = conv_2d(pool2_3_3, 16, filter_size=1,
                                      activation='relu',
                                      name='inception_3a_5_5_reduce')
    inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce, 32, filter_size=5,
                               activation='relu', name='inception_3a_5_5')
    inception_3a_pool = max_pool_2d(pool2_3_3, kernel_size=3, strides=1)
    inception_3a_pool_1_1 = conv_2d(inception_3a_pool, 32, filter_size=1,
                                    activation='relu',
                                    name='inception_3a_pool_1_1')
    inception_3a_output = merge([inception_3a_1_1, inception_3a_3_3,
                                 inception_3a_5_5, inception_3a_pool_1_1],
                                mode='concat', axis=3)

    # Inception module 3b.
    inception_3b_1_1 = conv_2d(inception_3a_output, 128, filter_size=1,
                               activation='relu', name='inception_3b_1_1')
    inception_3b_3_3_reduce = conv_2d(inception_3a_output, 128, filter_size=1,
                                      activation='relu',
                                      name='inception_3b_3_3_reduce')
    inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce, 192, filter_size=3,
                               activation='relu', name='inception_3b_3_3')
    inception_3b_5_5_reduce = conv_2d(inception_3a_output, 32, filter_size=1,
                                      activation='relu',
                                      name='inception_3b_5_5_reduce')
    # NOTE(review): no activation here (tflearn defaults to linear) while
    # every sibling conv uses ReLU; kept as-is to stay compatible with
    # pretrained weights, but this looks like an inherited omission - confirm.
    inception_3b_5_5 = conv_2d(inception_3b_5_5_reduce, 96, filter_size=5,
                               name='inception_3b_5_5')
    inception_3b_pool = max_pool_2d(inception_3a_output, kernel_size=3,
                                    strides=1, name='inception_3b_pool')
    inception_3b_pool_1_1 = conv_2d(inception_3b_pool, 64, filter_size=1,
                                    activation='relu',
                                    name='inception_3b_pool_1_1')
    inception_3b_output = merge([inception_3b_1_1, inception_3b_3_3,
                                 inception_3b_5_5, inception_3b_pool_1_1],
                                mode='concat', axis=3,
                                name='inception_3b_output')

    pool3_3_3 = max_pool_2d(inception_3b_output, kernel_size=3, strides=2,
                            name='pool3_3_3')

    # Inception module 4a.
    inception_4a_1_1 = conv_2d(pool3_3_3, 192, filter_size=1,
                               activation='relu', name='inception_4a_1_1')
    inception_4a_3_3_reduce = conv_2d(pool3_3_3, 96, filter_size=1,
                                      activation='relu',
                                      name='inception_4a_3_3_reduce')
    inception_4a_3_3 = conv_2d(inception_4a_3_3_reduce, 208, filter_size=3,
                               activation='relu', name='inception_4a_3_3')
    inception_4a_5_5_reduce = conv_2d(pool3_3_3, 16, filter_size=1,
                                      activation='relu',
                                      name='inception_4a_5_5_reduce')
    inception_4a_5_5 = conv_2d(inception_4a_5_5_reduce, 48, filter_size=5,
                               activation='relu', name='inception_4a_5_5')
    inception_4a_pool = max_pool_2d(pool3_3_3, kernel_size=3, strides=1,
                                    name='inception_4a_pool')
    inception_4a_pool_1_1 = conv_2d(inception_4a_pool, 64, filter_size=1,
                                    activation='relu',
                                    name='inception_4a_pool_1_1')
    inception_4a_output = merge([inception_4a_1_1, inception_4a_3_3,
                                 inception_4a_5_5, inception_4a_pool_1_1],
                                mode='concat', axis=3,
                                name='inception_4a_output')

    # Classifier head.
    pool5_7_7 = avg_pool_2d(inception_4a_output, kernel_size=5, strides=1)
    pool5_7_7 = dropout(pool5_7_7, 0.4)
    loss = fully_connected(pool5_7_7, 2, activation='softmax')

    if training:
        # Training-time hyperparameters only.
        network = regression(loss, optimizer='momentum',
                             loss='categorical_crossentropy',
                             learning_rate=0.001)
    else:
        network = loss

    model = tflearn.DNN(network, checkpoint_path='inceptiononv1onfire',
                        max_checkpoints=1, tensorboard_verbose=2)
    return model
name='inception_5b_5_5') inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool') inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1') inception_5b_output = merge([ inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1 ], axis=3, mode='concat') pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1) pool5_7_7 = dropout(pool5_7_7, 0.4) # fc loss = fully_connected(pool5_7_7, 17, activation='softmax') network = regression(loss, optimizer='momentum', loss='categorical_crossentropy', learning_rate=0.001) # to train model = tflearn.DNN(network, checkpoint_path='model_googlenet', max_checkpoints=1, tensorboard_verbose=2)
def construct_inceptionv1onfire(x, y):
    """InceptionV1-OnFire (training variant): reduced Inception-v1
    (modules 3a/3b/4a only) for binary fire / no-fire classification, per
    [Dunnings/Breckon, 2018]; always attaches the training regression layer.

    NOTE(review): this redefines the ``construct_inceptionv1onfire`` declared
    earlier in this file (which takes a ``training`` flag); at import time the
    later definition wins - confirm which variant callers expect.

    Args:
        x: input image width.
        y: input image height.

    Returns:
        A ``tflearn.DNN`` wrapping the network.
    """
    network = input_data(shape=[None, y, x, 3])

    # Stem: 5x5/2 conv -> pool -> LRN -> 1x1 reduce -> 3x3 conv -> LRN -> pool.
    conv1_7_7 = conv_2d(network, 64, 5, strides=2, activation='relu',
                        name='conv1_7_7_s2')
    pool1_3_3 = max_pool_2d(conv1_7_7, 3, strides=2)
    pool1_3_3 = local_response_normalization(pool1_3_3)
    conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu',
                               name='conv2_3_3_reduce')
    conv2_3_3 = conv_2d(conv2_3_3_reduce, 128, 3, activation='relu',
                        name='conv2_3_3')
    conv2_3_3 = local_response_normalization(conv2_3_3)
    pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=3, strides=2,
                            name='pool2_3_3_s2')

    # Inception module 3a.
    inception_3a_1_1 = conv_2d(pool2_3_3, 64, 1, activation='relu',
                               name='inception_3a_1_1')
    inception_3a_3_3_reduce = conv_2d(pool2_3_3, 96, 1, activation='relu',
                                      name='inception_3a_3_3_reduce')
    inception_3a_3_3 = conv_2d(inception_3a_3_3_reduce, 128, filter_size=3,
                               activation='relu', name='inception_3a_3_3')
    inception_3a_5_5_reduce = conv_2d(pool2_3_3, 16, filter_size=1,
                                      activation='relu',
                                      name='inception_3a_5_5_reduce')
    inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce, 32, filter_size=5,
                               activation='relu', name='inception_3a_5_5')
    inception_3a_pool = max_pool_2d(pool2_3_3, kernel_size=3, strides=1)
    inception_3a_pool_1_1 = conv_2d(inception_3a_pool, 32, filter_size=1,
                                    activation='relu',
                                    name='inception_3a_pool_1_1')
    inception_3a_output = merge([inception_3a_1_1, inception_3a_3_3,
                                 inception_3a_5_5, inception_3a_pool_1_1],
                                mode='concat', axis=3)

    # Inception module 3b.
    inception_3b_1_1 = conv_2d(inception_3a_output, 128, filter_size=1,
                               activation='relu', name='inception_3b_1_1')
    inception_3b_3_3_reduce = conv_2d(inception_3a_output, 128, filter_size=1,
                                      activation='relu',
                                      name='inception_3b_3_3_reduce')
    inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce, 192, filter_size=3,
                               activation='relu', name='inception_3b_3_3')
    inception_3b_5_5_reduce = conv_2d(inception_3a_output, 32, filter_size=1,
                                      activation='relu',
                                      name='inception_3b_5_5_reduce')
    # NOTE(review): no activation here (tflearn defaults to linear) while
    # every sibling conv uses ReLU; kept as-is to stay compatible with
    # pretrained weights, but this looks like an inherited omission - confirm.
    inception_3b_5_5 = conv_2d(inception_3b_5_5_reduce, 96, filter_size=5,
                               name='inception_3b_5_5')
    inception_3b_pool = max_pool_2d(inception_3a_output, kernel_size=3,
                                    strides=1, name='inception_3b_pool')
    inception_3b_pool_1_1 = conv_2d(inception_3b_pool, 64, filter_size=1,
                                    activation='relu',
                                    name='inception_3b_pool_1_1')
    inception_3b_output = merge([inception_3b_1_1, inception_3b_3_3,
                                 inception_3b_5_5, inception_3b_pool_1_1],
                                mode='concat', axis=3,
                                name='inception_3b_output')

    pool3_3_3 = max_pool_2d(inception_3b_output, kernel_size=3, strides=2,
                            name='pool3_3_3')

    # Inception module 4a.
    inception_4a_1_1 = conv_2d(pool3_3_3, 192, filter_size=1,
                               activation='relu', name='inception_4a_1_1')
    inception_4a_3_3_reduce = conv_2d(pool3_3_3, 96, filter_size=1,
                                      activation='relu',
                                      name='inception_4a_3_3_reduce')
    inception_4a_3_3 = conv_2d(inception_4a_3_3_reduce, 208, filter_size=3,
                               activation='relu', name='inception_4a_3_3')
    inception_4a_5_5_reduce = conv_2d(pool3_3_3, 16, filter_size=1,
                                      activation='relu',
                                      name='inception_4a_5_5_reduce')
    inception_4a_5_5 = conv_2d(inception_4a_5_5_reduce, 48, filter_size=5,
                               activation='relu', name='inception_4a_5_5')
    inception_4a_pool = max_pool_2d(pool3_3_3, kernel_size=3, strides=1,
                                    name='inception_4a_pool')
    inception_4a_pool_1_1 = conv_2d(inception_4a_pool, 64, filter_size=1,
                                    activation='relu',
                                    name='inception_4a_pool_1_1')
    inception_4a_output = merge([inception_4a_1_1, inception_4a_3_3,
                                 inception_4a_5_5, inception_4a_pool_1_1],
                                mode='concat', axis=3,
                                name='inception_4a_output')

    # Classifier head with training hyperparameters attached unconditionally.
    pool5_7_7 = avg_pool_2d(inception_4a_output, kernel_size=5, strides=1)
    pool5_7_7 = dropout(pool5_7_7, 0.4)
    loss = fully_connected(pool5_7_7, 2, activation='softmax')
    network = regression(loss, optimizer='momentum',
                         loss='categorical_crossentropy', learning_rate=0.001)

    model = tflearn.DNN(network, checkpoint_path='inceptiononv1onfire',
                        max_checkpoints=1, tensorboard_verbose=2)
    return model
'''
def _model4(): # Taken from TFLearn examples and based on Googles Inception. DO NOT RUN!!!! global yTest, img_aug tf.reset_default_graph() img_prep = ImagePreprocessing() img_prep.add_featurewise_zero_center() img_prep.add_featurewise_stdnorm() network = input_data(shape=[None, inputSize, inputSize, dim], name='input', data_preprocessing=img_prep, data_augmentation=img_aug) conv1_7_7 = conv_2d(network, 64, 7, strides=2, activation='relu', name='conv1_7_7_s2') pool1_3_3 = max_pool_2d(conv1_7_7, 3, strides=2) pool1_3_3 = local_response_normalization(pool1_3_3) conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu', name='conv2_3_3_reduce') conv2_3_3 = conv_2d(conv2_3_3_reduce, 192, 3, activation='relu', name='conv2_3_3') conv2_3_3 = local_response_normalization(conv2_3_3) pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=3, strides=2, name='pool2_3_3_s2') # 3a inception_3a_1_1 = conv_2d(pool2_3_3, 64, 1, activation='relu', name='inception_3a_1_1') inception_3a_3_3_reduce = conv_2d(pool2_3_3, 96, 1, activation='relu', name='inception_3a_3_3_reduce') inception_3a_3_3 = conv_2d(inception_3a_3_3_reduce, 128, filter_size=3, activation='relu', name='inception_3a_3_3') inception_3a_5_5_reduce = conv_2d(pool2_3_3, 16, filter_size=1, activation='relu', name='inception_3a_5_5_reduce') inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce, 32, filter_size=5, activation='relu', name='inception_3a_5_5') inception_3a_pool = max_pool_2d(pool2_3_3, kernel_size=3, strides=1, name='inception_3a_pool') inception_3a_pool_1_1 = conv_2d(inception_3a_pool, 32, filter_size=1, activation='relu', name='inception_3a_pool_1_1') inception_3a_output = merge([inception_3a_1_1, inception_3a_3_3, inception_3a_5_5, inception_3a_pool_1_1], mode='concat', axis=3) # 3b inception_3b_1_1 = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_1_1') inception_3b_3_3_reduce = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', 
name='inception_3b_3_3_reduce') inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce, 192, filter_size=3, activation='relu', name='inception_3b_3_3') inception_3b_5_5_reduce = conv_2d(inception_3a_output, 32, filter_size=1, activation='relu', name='inception_3b_5_5_reduce') inception_3b_5_5 = conv_2d(inception_3b_5_5_reduce, 96, filter_size=5, name='inception_3b_5_5') inception_3b_pool = max_pool_2d(inception_3a_output, kernel_size=3, strides=1, name='inception_3b_pool') inception_3b_pool_1_1 = conv_2d(inception_3b_pool, 64, filter_size=1, activation='relu', name='inception_3b_pool_1_1') inception_3b_output = merge([inception_3b_1_1, inception_3b_3_3, inception_3b_5_5, inception_3b_pool_1_1], mode='concat', axis=3, name='inception_3b_output') pool3_3_3 = max_pool_2d(inception_3b_output, kernel_size=3, strides=2, name='pool3_3_3') # 4a inception_4a_1_1 = conv_2d(pool3_3_3, 192, filter_size=1, activation='relu', name='inception_4a_1_1') inception_4a_3_3_reduce = conv_2d(pool3_3_3, 96, filter_size=1, activation='relu', name='inception_4a_3_3_reduce') inception_4a_3_3 = conv_2d(inception_4a_3_3_reduce, 208, filter_size=3, activation='relu', name='inception_4a_3_3') inception_4a_5_5_reduce = conv_2d(pool3_3_3, 16, filter_size=1, activation='relu', name='inception_4a_5_5_reduce') inception_4a_5_5 = conv_2d(inception_4a_5_5_reduce, 48, filter_size=5, activation='relu', name='inception_4a_5_5') inception_4a_pool = max_pool_2d(pool3_3_3, kernel_size=3, strides=1, name='inception_4a_pool') inception_4a_pool_1_1 = conv_2d(inception_4a_pool, 64, filter_size=1, activation='relu', name='inception_4a_pool_1_1') inception_4a_output = merge([inception_4a_1_1, inception_4a_3_3, inception_4a_5_5, inception_4a_pool_1_1], mode='concat', axis=3, name='inception_4a_output') # 4b inception_4b_1_1 = conv_2d(inception_4a_output, 160, filter_size=1, activation='relu', name='inception_4a_1_1') inception_4b_3_3_reduce = conv_2d(inception_4a_output, 112, filter_size=1, activation='relu', 
name='inception_4b_3_3_reduce') inception_4b_3_3 = conv_2d(inception_4b_3_3_reduce, 224, filter_size=3, activation='relu', name='inception_4b_3_3') inception_4b_5_5_reduce = conv_2d(inception_4a_output, 24, filter_size=1, activation='relu', name='inception_4b_5_5_reduce') inception_4b_5_5 = conv_2d(inception_4b_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4b_5_5') inception_4b_pool = max_pool_2d(inception_4a_output, kernel_size=3, strides=1, name='inception_4b_pool') inception_4b_pool_1_1 = conv_2d(inception_4b_pool, 64, filter_size=1, activation='relu', name='inception_4b_pool_1_1') inception_4b_output = merge([inception_4b_1_1, inception_4b_3_3, inception_4b_5_5, inception_4b_pool_1_1], mode='concat', axis=3, name='inception_4b_output') # 4c inception_4c_1_1 = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_1_1') inception_4c_3_3_reduce = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_3_3_reduce') inception_4c_3_3 = conv_2d(inception_4c_3_3_reduce, 256, filter_size=3, activation='relu', name='inception_4c_3_3') inception_4c_5_5_reduce = conv_2d(inception_4b_output, 24, filter_size=1, activation='relu', name='inception_4c_5_5_reduce') inception_4c_5_5 = conv_2d(inception_4c_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4c_5_5') inception_4c_pool = max_pool_2d(inception_4b_output, kernel_size=3, strides=1) inception_4c_pool_1_1 = conv_2d(inception_4c_pool, 64, filter_size=1, activation='relu', name='inception_4c_pool_1_1') inception_4c_output = merge([inception_4c_1_1, inception_4c_3_3, inception_4c_5_5, inception_4c_pool_1_1], mode='concat', axis=3, name='inception_4c_output') # 4d inception_4d_1_1 = conv_2d(inception_4c_output, 112, filter_size=1, activation='relu', name='inception_4d_1_1') inception_4d_3_3_reduce = conv_2d(inception_4c_output, 144, filter_size=1, activation='relu', name='inception_4d_3_3_reduce') inception_4d_3_3 = 
conv_2d(inception_4d_3_3_reduce, 288, filter_size=3, activation='relu', name='inception_4d_3_3') inception_4d_5_5_reduce = conv_2d(inception_4c_output, 32, filter_size=1, activation='relu', name='inception_4d_5_5_reduce') inception_4d_5_5 = conv_2d(inception_4d_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4d_5_5') inception_4d_pool = max_pool_2d(inception_4c_output, kernel_size=3, strides=1, name='inception_4d_pool') inception_4d_pool_1_1 = conv_2d(inception_4d_pool, 64, filter_size=1, activation='relu', name='inception_4d_pool_1_1') inception_4d_output = merge([inception_4d_1_1, inception_4d_3_3, inception_4d_5_5, inception_4d_pool_1_1], mode='concat', axis=3, name='inception_4d_output') # 4e inception_4e_1_1 = conv_2d(inception_4d_output, 256, filter_size=1, activation='relu', name='inception_4e_1_1') inception_4e_3_3_reduce = conv_2d(inception_4d_output, 160, filter_size=1, activation='relu', name='inception_4e_3_3_reduce') inception_4e_3_3 = conv_2d(inception_4e_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_4e_3_3') inception_4e_5_5_reduce = conv_2d(inception_4d_output, 32, filter_size=1, activation='relu', name='inception_4e_5_5_reduce') inception_4e_5_5 = conv_2d(inception_4e_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_4e_5_5') inception_4e_pool = max_pool_2d(inception_4d_output, kernel_size=3, strides=1, name='inception_4e_pool') inception_4e_pool_1_1 = conv_2d(inception_4e_pool, 128, filter_size=1, activation='relu', name='inception_4e_pool_1_1') inception_4e_output = merge([inception_4e_1_1, inception_4e_3_3, inception_4e_5_5, inception_4e_pool_1_1], axis=3, mode='concat') pool4_3_3 = max_pool_2d(inception_4e_output, kernel_size=3, strides=2, name='pool_3_3') # 5a inception_5a_1_1 = conv_2d(pool4_3_3, 256, filter_size=1, activation='relu', name='inception_5a_1_1') inception_5a_3_3_reduce = conv_2d(pool4_3_3, 160, filter_size=1, activation='relu', name='inception_5a_3_3_reduce') 
inception_5a_3_3 = conv_2d(inception_5a_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_5a_3_3') inception_5a_5_5_reduce = conv_2d(pool4_3_3, 32, filter_size=1, activation='relu', name='inception_5a_5_5_reduce') inception_5a_5_5 = conv_2d(inception_5a_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5a_5_5') inception_5a_pool = max_pool_2d(pool4_3_3, kernel_size=3, strides=1, name='inception_5a_pool') inception_5a_pool_1_1 = conv_2d(inception_5a_pool, 128, filter_size=1, activation='relu', name='inception_5a_pool_1_1') inception_5a_output = merge([inception_5a_1_1, inception_5a_3_3, inception_5a_5_5, inception_5a_pool_1_1], axis=3, mode='concat') # 5b inception_5b_1_1 = conv_2d(inception_5a_output, 384, filter_size=1, activation='relu', name='inception_5b_1_1') inception_5b_3_3_reduce = conv_2d(inception_5a_output, 192, filter_size=1, activation='relu', name='inception_5b_3_3_reduce') inception_5b_3_3 = conv_2d(inception_5b_3_3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3_3') inception_5b_5_5_reduce = conv_2d(inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5_5_reduce') inception_5b_5_5 = conv_2d(inception_5b_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5_5') inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool') inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1') inception_5b_output = merge([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1], axis=3, mode='concat') pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1) pool5_7_7 = dropout(pool5_7_7, 0.4) # fc loss = fully_connected(pool5_7_7, len(Y[0]), activation='softmax') network = regression(loss, optimizer='momentum', loss='categorical_crossentropy', learning_rate=0.001) # to train model = tflearn.DNN(network, 
checkpoint_path='model_googlenet', max_checkpoints=1, tensorboard_verbose=2) model.fit(X, Y, n_epoch=epochNum, validation_set=(xTest, yTest), shuffle=True, show_metric=True, batch_size=batchNum, snapshot_step=200, snapshot_epoch=False, run_id='googlenet_oxflowers17') if modelStore: model.save(_id + '-model.tflearn')
def googlenet(width, height, frame_count, lr, output=6, model_name='sentnet_color.model'):
    """Build a GoogLeNet (Inception-v1)-style tflearn model for RGB frames.

    Args:
        width: input image width in pixels.
        height: input image height in pixels.
        frame_count: unused here; kept for signature compatibility with callers.
        lr: learning rate for the momentum optimizer.
        output: number of output classes (width of the final softmax).
        model_name: unused here; kept for signature compatibility with callers.

    Returns:
        A compiled ``tflearn.DNN`` model (graph pinned to ``/gpu:0``).

    NOTE(review): the stem/early filter sizes (28, 9, 12, 15) deviate from the
    canonical GoogLeNet values (7, 3, 3, 5) — presumably a deliberate scaling
    for large inputs; confirm against the training pipeline.
    """
    tflearn.init_graph()
    with tf.device('/gpu:0'):
        network = input_data(shape=[None, width, height, 3], name='input')
        # Stem: oversized conv/pool stack, then LRN as in the original paper.
        conv1_7_7 = conv_2d(network, 64, 28, strides=4, activation='relu', name='conv1_7_7_s2')
        pool1_3_3 = max_pool_2d(conv1_7_7, 9, strides=4)
        pool1_3_3 = local_response_normalization(pool1_3_3)
        conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu', name='conv2_3_3_reduce')
        conv2_3_3 = conv_2d(conv2_3_3_reduce, 192, 12, activation='relu', name='conv2_3_3')
        conv2_3_3 = local_response_normalization(conv2_3_3)
        pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=12, strides=2, name='pool2_3_3_s2')

        # Inception 3a: four parallel branches concatenated on the channel axis.
        inception_3a_1_1 = conv_2d(pool2_3_3, 64, 1, activation='relu', name='inception_3a_1_1')
        inception_3a_3_3_reduce = conv_2d(pool2_3_3, 96, 1, activation='relu', name='inception_3a_3_3_reduce')
        inception_3a_3_3 = conv_2d(inception_3a_3_3_reduce, 128, filter_size=12, activation='relu', name='inception_3a_3_3')
        inception_3a_5_5_reduce = conv_2d(pool2_3_3, 16, filter_size=1, activation='relu', name='inception_3a_5_5_reduce')
        inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce, 32, filter_size=15, activation='relu', name='inception_3a_5_5')
        inception_3a_pool = max_pool_2d(pool2_3_3, kernel_size=12, strides=1)
        inception_3a_pool_1_1 = conv_2d(inception_3a_pool, 32, filter_size=1, activation='relu', name='inception_3a_pool_1_1')
        inception_3a_output = merge([inception_3a_1_1, inception_3a_3_3, inception_3a_5_5, inception_3a_pool_1_1], mode='concat', axis=3)

        # Inception 3b
        inception_3b_1_1 = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_1_1')
        inception_3b_3_3_reduce = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_3_3_reduce')
        inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce, 192, filter_size=9, activation='relu', name='inception_3b_3_3')
        inception_3b_5_5_reduce = conv_2d(inception_3a_output, 32, filter_size=1, activation='relu', name='inception_3b_5_5_reduce')
        # BUG FIX: activation='relu' was missing here, leaving this one branch
        # linear while every sibling branch is ReLU-activated.
        inception_3b_5_5 = conv_2d(inception_3b_5_5_reduce, 96, filter_size=15, activation='relu', name='inception_3b_5_5')
        inception_3b_pool = max_pool_2d(inception_3a_output, kernel_size=12, strides=1, name='inception_3b_pool')
        inception_3b_pool_1_1 = conv_2d(inception_3b_pool, 64, filter_size=1, activation='relu', name='inception_3b_pool_1_1')
        inception_3b_output = merge([inception_3b_1_1, inception_3b_3_3, inception_3b_5_5, inception_3b_pool_1_1], mode='concat', axis=3, name='inception_3b_output')

        pool3_3_3 = max_pool_2d(inception_3b_output, kernel_size=3, strides=2, name='pool3_3_3')

        # Inception 4a
        inception_4a_1_1 = conv_2d(pool3_3_3, 192, filter_size=1, activation='relu', name='inception_4a_1_1')
        inception_4a_3_3_reduce = conv_2d(pool3_3_3, 96, filter_size=1, activation='relu', name='inception_4a_3_3_reduce')
        inception_4a_3_3 = conv_2d(inception_4a_3_3_reduce, 208, filter_size=3, activation='relu', name='inception_4a_3_3')
        inception_4a_5_5_reduce = conv_2d(pool3_3_3, 16, filter_size=1, activation='relu', name='inception_4a_5_5_reduce')
        inception_4a_5_5 = conv_2d(inception_4a_5_5_reduce, 48, filter_size=5, activation='relu', name='inception_4a_5_5')
        inception_4a_pool = max_pool_2d(pool3_3_3, kernel_size=3, strides=1, name='inception_4a_pool')
        inception_4a_pool_1_1 = conv_2d(inception_4a_pool, 64, filter_size=1, activation='relu', name='inception_4a_pool_1_1')
        inception_4a_output = merge([inception_4a_1_1, inception_4a_3_3, inception_4a_5_5, inception_4a_pool_1_1], mode='concat', axis=3, name='inception_4a_output')

        # Inception 4b
        # BUG FIX: this layer was named 'inception_4a_1_1' (copy/paste),
        # colliding with the 4a branch above in checkpoints/TensorBoard.
        inception_4b_1_1 = conv_2d(inception_4a_output, 160, filter_size=1, activation='relu', name='inception_4b_1_1')
        inception_4b_3_3_reduce = conv_2d(inception_4a_output, 112, filter_size=1, activation='relu', name='inception_4b_3_3_reduce')
        inception_4b_3_3 = conv_2d(inception_4b_3_3_reduce, 224, filter_size=3, activation='relu', name='inception_4b_3_3')
        inception_4b_5_5_reduce = conv_2d(inception_4a_output, 24, filter_size=1, activation='relu', name='inception_4b_5_5_reduce')
        inception_4b_5_5 = conv_2d(inception_4b_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4b_5_5')
        inception_4b_pool = max_pool_2d(inception_4a_output, kernel_size=3, strides=1, name='inception_4b_pool')
        inception_4b_pool_1_1 = conv_2d(inception_4b_pool, 64, filter_size=1, activation='relu', name='inception_4b_pool_1_1')
        inception_4b_output = merge([inception_4b_1_1, inception_4b_3_3, inception_4b_5_5, inception_4b_pool_1_1], mode='concat', axis=3, name='inception_4b_output')

        # Inception 4c
        inception_4c_1_1 = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_1_1')
        inception_4c_3_3_reduce = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_3_3_reduce')
        inception_4c_3_3 = conv_2d(inception_4c_3_3_reduce, 256, filter_size=3, activation='relu', name='inception_4c_3_3')
        inception_4c_5_5_reduce = conv_2d(inception_4b_output, 24, filter_size=1, activation='relu', name='inception_4c_5_5_reduce')
        inception_4c_5_5 = conv_2d(inception_4c_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4c_5_5')
        inception_4c_pool = max_pool_2d(inception_4b_output, kernel_size=3, strides=1)
        inception_4c_pool_1_1 = conv_2d(inception_4c_pool, 64, filter_size=1, activation='relu', name='inception_4c_pool_1_1')
        inception_4c_output = merge([inception_4c_1_1, inception_4c_3_3, inception_4c_5_5, inception_4c_pool_1_1], mode='concat', axis=3, name='inception_4c_output')

        # Inception 4d
        inception_4d_1_1 = conv_2d(inception_4c_output, 112, filter_size=1, activation='relu', name='inception_4d_1_1')
        inception_4d_3_3_reduce = conv_2d(inception_4c_output, 144, filter_size=1, activation='relu', name='inception_4d_3_3_reduce')
        inception_4d_3_3 = conv_2d(inception_4d_3_3_reduce, 288, filter_size=3, activation='relu', name='inception_4d_3_3')
        inception_4d_5_5_reduce = conv_2d(inception_4c_output, 32, filter_size=1, activation='relu', name='inception_4d_5_5_reduce')
        inception_4d_5_5 = conv_2d(inception_4d_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4d_5_5')
        inception_4d_pool = max_pool_2d(inception_4c_output, kernel_size=3, strides=1, name='inception_4d_pool')
        inception_4d_pool_1_1 = conv_2d(inception_4d_pool, 64, filter_size=1, activation='relu', name='inception_4d_pool_1_1')
        inception_4d_output = merge([inception_4d_1_1, inception_4d_3_3, inception_4d_5_5, inception_4d_pool_1_1], mode='concat', axis=3, name='inception_4d_output')

        # Inception 4e
        inception_4e_1_1 = conv_2d(inception_4d_output, 256, filter_size=1, activation='relu', name='inception_4e_1_1')
        inception_4e_3_3_reduce = conv_2d(inception_4d_output, 160, filter_size=1, activation='relu', name='inception_4e_3_3_reduce')
        inception_4e_3_3 = conv_2d(inception_4e_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_4e_3_3')
        inception_4e_5_5_reduce = conv_2d(inception_4d_output, 32, filter_size=1, activation='relu', name='inception_4e_5_5_reduce')
        inception_4e_5_5 = conv_2d(inception_4e_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_4e_5_5')
        inception_4e_pool = max_pool_2d(inception_4d_output, kernel_size=3, strides=1, name='inception_4e_pool')
        inception_4e_pool_1_1 = conv_2d(inception_4e_pool, 128, filter_size=1, activation='relu', name='inception_4e_pool_1_1')
        inception_4e_output = merge([inception_4e_1_1, inception_4e_3_3, inception_4e_5_5, inception_4e_pool_1_1], axis=3, mode='concat')

        pool4_3_3 = max_pool_2d(inception_4e_output, kernel_size=3, strides=2, name='pool_3_3')

        # Inception 5a
        inception_5a_1_1 = conv_2d(pool4_3_3, 256, filter_size=1, activation='relu', name='inception_5a_1_1')
        inception_5a_3_3_reduce = conv_2d(pool4_3_3, 160, filter_size=1, activation='relu', name='inception_5a_3_3_reduce')
        inception_5a_3_3 = conv_2d(inception_5a_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_5a_3_3')
        inception_5a_5_5_reduce = conv_2d(pool4_3_3, 32, filter_size=1, activation='relu', name='inception_5a_5_5_reduce')
        inception_5a_5_5 = conv_2d(inception_5a_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5a_5_5')
        inception_5a_pool = max_pool_2d(pool4_3_3, kernel_size=3, strides=1, name='inception_5a_pool')
        inception_5a_pool_1_1 = conv_2d(inception_5a_pool, 128, filter_size=1, activation='relu', name='inception_5a_pool_1_1')
        inception_5a_output = merge([inception_5a_1_1, inception_5a_3_3, inception_5a_5_5, inception_5a_pool_1_1], axis=3, mode='concat')

        # Inception 5b
        inception_5b_1_1 = conv_2d(inception_5a_output, 384, filter_size=1, activation='relu', name='inception_5b_1_1')
        inception_5b_3_3_reduce = conv_2d(inception_5a_output, 192, filter_size=1, activation='relu', name='inception_5b_3_3_reduce')
        inception_5b_3_3 = conv_2d(inception_5b_3_3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3_3')
        inception_5b_5_5_reduce = conv_2d(inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5_5_reduce')
        inception_5b_5_5 = conv_2d(inception_5b_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5_5')
        inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool')
        inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1')
        inception_5b_output = merge([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1], axis=3, mode='concat')

        # Head: global average pool, dropout, softmax classifier.
        pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1)
        pool5_7_7 = dropout(pool5_7_7, 0.4)
        loss = fully_connected(pool5_7_7, output, activation='softmax')
        network = regression(loss, optimizer='momentum',
                             loss='categorical_crossentropy',
                             learning_rate=lr, name='targets')
        model = tflearn.DNN(network, max_checkpoints=0,
                            tensorboard_verbose=0, tensorboard_dir='log')
        return model
def build_le_network(self, network):
    """Build a GoogLeNet (Inception-v1) classification graph on top of `network`.

    Note: the original docstring called this "inception v3"; the layer layout
    (stem + inception 3a..5b + 7x7 average pool) is the Inception-v1 /
    GoogLeNet topology.

    Args:
        network: input tensor/layer the stem convolutions attach to.

    Returns:
        A tflearn `regression` layer (momentum optimizer, categorical
        cross-entropy, lr=0.001) over a 2-way softmax.
    """
    from tflearn.layers.merge_ops import merge
    # Stem
    conv1_7_7 = conv_2d(network, 64, 7, strides=2, activation='relu', name='conv1_7_7_s2')
    pool1_3_3 = max_pool_2d(conv1_7_7, 3, strides=2)
    pool1_3_3 = local_response_normalization(pool1_3_3)
    conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu', name='conv2_3_3_reduce')
    conv2_3_3 = conv_2d(conv2_3_3_reduce, 192, 3, activation='relu', name='conv2_3_3')
    conv2_3_3 = local_response_normalization(conv2_3_3)
    pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=3, strides=2, name='pool2_3_3_s2')

    # Inception 3a
    inception_3a_1_1 = conv_2d(pool2_3_3, 64, 1, activation='relu', name='inception_3a_1_1')
    inception_3a_3_3_reduce = conv_2d(pool2_3_3, 96, 1, activation='relu', name='inception_3a_3_3_reduce')
    inception_3a_3_3 = conv_2d(inception_3a_3_3_reduce, 128, filter_size=3, activation='relu', name='inception_3a_3_3')
    inception_3a_5_5_reduce = conv_2d(pool2_3_3, 16, filter_size=1, activation='relu', name='inception_3a_5_5_reduce')
    inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce, 32, filter_size=5, activation='relu', name='inception_3a_5_5')
    inception_3a_pool = max_pool_2d(pool2_3_3, kernel_size=3, strides=1, )
    inception_3a_pool_1_1 = conv_2d(inception_3a_pool, 32, filter_size=1, activation='relu', name='inception_3a_pool_1_1')
    inception_3a_output = merge([inception_3a_1_1, inception_3a_3_3, inception_3a_5_5, inception_3a_pool_1_1], mode='concat', axis=3)

    # Inception 3b
    inception_3b_1_1 = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_1_1')
    inception_3b_3_3_reduce = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_3_3_reduce')
    inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce, 192, filter_size=3, activation='relu', name='inception_3b_3_3')
    inception_3b_5_5_reduce = conv_2d(inception_3a_output, 32, filter_size=1, activation='relu', name='inception_3b_5_5_reduce')
    # BUG FIX: activation='relu' was missing here, leaving this branch linear
    # while every sibling branch is ReLU-activated.
    inception_3b_5_5 = conv_2d(inception_3b_5_5_reduce, 96, filter_size=5, activation='relu', name='inception_3b_5_5')
    inception_3b_pool = max_pool_2d(inception_3a_output, kernel_size=3, strides=1, name='inception_3b_pool')
    inception_3b_pool_1_1 = conv_2d(inception_3b_pool, 64, filter_size=1, activation='relu', name='inception_3b_pool_1_1')
    inception_3b_output = merge([inception_3b_1_1, inception_3b_3_3, inception_3b_5_5, inception_3b_pool_1_1], mode='concat', axis=3, name='inception_3b_output')

    pool3_3_3 = max_pool_2d(inception_3b_output, kernel_size=3, strides=2, name='pool3_3_3')

    # Inception 4a
    inception_4a_1_1 = conv_2d(pool3_3_3, 192, filter_size=1, activation='relu', name='inception_4a_1_1')
    inception_4a_3_3_reduce = conv_2d(pool3_3_3, 96, filter_size=1, activation='relu', name='inception_4a_3_3_reduce')
    inception_4a_3_3 = conv_2d(inception_4a_3_3_reduce, 208, filter_size=3, activation='relu', name='inception_4a_3_3')
    inception_4a_5_5_reduce = conv_2d(pool3_3_3, 16, filter_size=1, activation='relu', name='inception_4a_5_5_reduce')
    inception_4a_5_5 = conv_2d(inception_4a_5_5_reduce, 48, filter_size=5, activation='relu', name='inception_4a_5_5')
    inception_4a_pool = max_pool_2d(pool3_3_3, kernel_size=3, strides=1, name='inception_4a_pool')
    inception_4a_pool_1_1 = conv_2d(inception_4a_pool, 64, filter_size=1, activation='relu', name='inception_4a_pool_1_1')
    inception_4a_output = merge([inception_4a_1_1, inception_4a_3_3, inception_4a_5_5, inception_4a_pool_1_1], mode='concat', axis=3, name='inception_4a_output')

    # Inception 4b
    # BUG FIX: this layer was named 'inception_4a_1_1' (copy/paste), colliding
    # with the 4a branch above in checkpoints/TensorBoard.
    inception_4b_1_1 = conv_2d(inception_4a_output, 160, filter_size=1, activation='relu', name='inception_4b_1_1')
    inception_4b_3_3_reduce = conv_2d(inception_4a_output, 112, filter_size=1, activation='relu', name='inception_4b_3_3_reduce')
    inception_4b_3_3 = conv_2d(inception_4b_3_3_reduce, 224, filter_size=3, activation='relu', name='inception_4b_3_3')
    inception_4b_5_5_reduce = conv_2d(inception_4a_output, 24, filter_size=1, activation='relu', name='inception_4b_5_5_reduce')
    inception_4b_5_5 = conv_2d(inception_4b_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4b_5_5')
    inception_4b_pool = max_pool_2d(inception_4a_output, kernel_size=3, strides=1, name='inception_4b_pool')
    inception_4b_pool_1_1 = conv_2d(inception_4b_pool, 64, filter_size=1, activation='relu', name='inception_4b_pool_1_1')
    inception_4b_output = merge([inception_4b_1_1, inception_4b_3_3, inception_4b_5_5, inception_4b_pool_1_1], mode='concat', axis=3, name='inception_4b_output')

    # Inception 4c
    inception_4c_1_1 = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_1_1')
    inception_4c_3_3_reduce = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_3_3_reduce')
    inception_4c_3_3 = conv_2d(inception_4c_3_3_reduce, 256, filter_size=3, activation='relu', name='inception_4c_3_3')
    inception_4c_5_5_reduce = conv_2d(inception_4b_output, 24, filter_size=1, activation='relu', name='inception_4c_5_5_reduce')
    inception_4c_5_5 = conv_2d(inception_4c_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4c_5_5')
    inception_4c_pool = max_pool_2d(inception_4b_output, kernel_size=3, strides=1)
    inception_4c_pool_1_1 = conv_2d(inception_4c_pool, 64, filter_size=1, activation='relu', name='inception_4c_pool_1_1')
    inception_4c_output = merge([inception_4c_1_1, inception_4c_3_3, inception_4c_5_5, inception_4c_pool_1_1], mode='concat', axis=3, name='inception_4c_output')

    # Inception 4d
    inception_4d_1_1 = conv_2d(inception_4c_output, 112, filter_size=1, activation='relu', name='inception_4d_1_1')
    inception_4d_3_3_reduce = conv_2d(inception_4c_output, 144, filter_size=1, activation='relu', name='inception_4d_3_3_reduce')
    inception_4d_3_3 = conv_2d(inception_4d_3_3_reduce, 288, filter_size=3, activation='relu', name='inception_4d_3_3')
    inception_4d_5_5_reduce = conv_2d(inception_4c_output, 32, filter_size=1, activation='relu', name='inception_4d_5_5_reduce')
    inception_4d_5_5 = conv_2d(inception_4d_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4d_5_5')
    inception_4d_pool = max_pool_2d(inception_4c_output, kernel_size=3, strides=1, name='inception_4d_pool')
    inception_4d_pool_1_1 = conv_2d(inception_4d_pool, 64, filter_size=1, activation='relu', name='inception_4d_pool_1_1')
    inception_4d_output = merge([inception_4d_1_1, inception_4d_3_3, inception_4d_5_5, inception_4d_pool_1_1], mode='concat', axis=3, name='inception_4d_output')

    # Inception 4e
    inception_4e_1_1 = conv_2d(inception_4d_output, 256, filter_size=1, activation='relu', name='inception_4e_1_1')
    inception_4e_3_3_reduce = conv_2d(inception_4d_output, 160, filter_size=1, activation='relu', name='inception_4e_3_3_reduce')
    inception_4e_3_3 = conv_2d(inception_4e_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_4e_3_3')
    inception_4e_5_5_reduce = conv_2d(inception_4d_output, 32, filter_size=1, activation='relu', name='inception_4e_5_5_reduce')
    inception_4e_5_5 = conv_2d(inception_4e_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_4e_5_5')
    inception_4e_pool = max_pool_2d(inception_4d_output, kernel_size=3, strides=1, name='inception_4e_pool')
    inception_4e_pool_1_1 = conv_2d(inception_4e_pool, 128, filter_size=1, activation='relu', name='inception_4e_pool_1_1')
    inception_4e_output = merge([inception_4e_1_1, inception_4e_3_3, inception_4e_5_5, inception_4e_pool_1_1], axis=3, mode='concat')

    pool4_3_3 = max_pool_2d(inception_4e_output, kernel_size=3, strides=2, name='pool_3_3')

    # Inception 5a
    inception_5a_1_1 = conv_2d(pool4_3_3, 256, filter_size=1, activation='relu', name='inception_5a_1_1')
    inception_5a_3_3_reduce = conv_2d(pool4_3_3, 160, filter_size=1, activation='relu', name='inception_5a_3_3_reduce')
    inception_5a_3_3 = conv_2d(inception_5a_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_5a_3_3')
    inception_5a_5_5_reduce = conv_2d(pool4_3_3, 32, filter_size=1, activation='relu', name='inception_5a_5_5_reduce')
    inception_5a_5_5 = conv_2d(inception_5a_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5a_5_5')
    inception_5a_pool = max_pool_2d(pool4_3_3, kernel_size=3, strides=1, name='inception_5a_pool')
    inception_5a_pool_1_1 = conv_2d(inception_5a_pool, 128, filter_size=1, activation='relu', name='inception_5a_pool_1_1')
    inception_5a_output = merge([inception_5a_1_1, inception_5a_3_3, inception_5a_5_5, inception_5a_pool_1_1], axis=3, mode='concat')

    # Inception 5b
    inception_5b_1_1 = conv_2d(inception_5a_output, 384, filter_size=1, activation='relu', name='inception_5b_1_1')
    inception_5b_3_3_reduce = conv_2d(inception_5a_output, 192, filter_size=1, activation='relu', name='inception_5b_3_3_reduce')
    inception_5b_3_3 = conv_2d(inception_5b_3_3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3_3')
    inception_5b_5_5_reduce = conv_2d(inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5_5_reduce')
    inception_5b_5_5 = conv_2d(inception_5b_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5_5')
    inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool')
    inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1')
    inception_5b_output = merge([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1], axis=3, mode='concat')

    # Head: global average pool, dropout, 2-way softmax classifier.
    pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1)
    pool5_7_7 = dropout(pool5_7_7, 0.4)
    loss = fully_connected(pool5_7_7, 2, activation='softmax')
    network = regression(loss, optimizer='momentum',
                         loss='categorical_crossentropy',
                         learning_rate=0.001)
    return network
def build_net(network, X, Y, num_classes, num_epochs, checkpoint_path, size_batch, Xval=None, Yval=None, dec_step=100, train=True):
    """Build a GoogLeNet (Inception-v1) model and optionally train it.

    Args:
        network: input tensor/layer the stem convolutions attach to.
        X, Y: training data and one-hot labels.
        num_classes: width of the final softmax.
        num_epochs: number of training epochs.
        checkpoint_path: where tflearn writes checkpoints.
        size_batch: training batch size.
        Xval, Yval: optional validation data; when either is None, no
            validation split is used (validation_set=0.0).
        dec_step: unused here; kept for signature compatibility with callers.
        train: when True, fit the model before returning it.

    Returns:
        A ``tflearn.DNN`` model (trained when ``train`` is True).
    """
    # NOTE(review): these initializers are created but never passed to any
    # layer; kept for compatibility, but they have no effect on the graph.
    tn = tflearn.initializations.truncated_normal(seed=100)
    xav = tflearn.initializations.xavier(seed=100)
    nor = tflearn.initializations.normal(seed=100)

    # Stem
    conv1_7_7 = conv_2d(network, 64, 7, strides=2, activation='relu', name='conv1_7_7_s2')
    pool1_3_3 = max_pool_2d(conv1_7_7, 3, strides=2)
    pool1_3_3 = local_response_normalization(pool1_3_3)
    conv2_3_3_reduce = conv_2d(pool1_3_3, 64, 1, activation='relu', name='conv2_3_3_reduce')
    conv2_3_3 = conv_2d(conv2_3_3_reduce, 192, 3, activation='relu', name='conv2_3_3')
    conv2_3_3 = local_response_normalization(conv2_3_3)
    pool2_3_3 = max_pool_2d(conv2_3_3, kernel_size=3, strides=2, name='pool2_3_3_s2')

    # 3a
    inception_3a_1_1 = conv_2d(pool2_3_3, 64, 1, activation='relu', name='inception_3a_1_1')
    inception_3a_3_3_reduce = conv_2d(pool2_3_3, 96, 1, activation='relu', name='inception_3a_3_3_reduce')
    inception_3a_3_3 = conv_2d(inception_3a_3_3_reduce, 128, filter_size=3, activation='relu', name='inception_3a_3_3')
    inception_3a_5_5_reduce = conv_2d(pool2_3_3, 16, filter_size=1, activation='relu', name='inception_3a_5_5_reduce')
    inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce, 32, filter_size=5, activation='relu', name='inception_3a_5_5')
    inception_3a_pool = max_pool_2d(pool2_3_3, kernel_size=3, strides=1, name='inception_3a_pool')
    inception_3a_pool_1_1 = conv_2d(inception_3a_pool, 32, filter_size=1, activation='relu', name='inception_3a_pool_1_1')
    inception_3a_output = merge([inception_3a_1_1, inception_3a_3_3, inception_3a_5_5, inception_3a_pool_1_1], mode='concat', axis=3)

    # 3b
    inception_3b_1_1 = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_1_1')
    inception_3b_3_3_reduce = conv_2d(inception_3a_output, 128, filter_size=1, activation='relu', name='inception_3b_3_3_reduce')
    inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce, 192, filter_size=3, activation='relu', name='inception_3b_3_3')
    inception_3b_5_5_reduce = conv_2d(inception_3a_output, 32, filter_size=1, activation='relu', name='inception_3b_5_5_reduce')
    # BUG FIX: activation='relu' was missing here, leaving this branch linear
    # while every sibling branch is ReLU-activated.
    inception_3b_5_5 = conv_2d(inception_3b_5_5_reduce, 96, filter_size=5, activation='relu', name='inception_3b_5_5')
    inception_3b_pool = max_pool_2d(inception_3a_output, kernel_size=3, strides=1, name='inception_3b_pool')
    inception_3b_pool_1_1 = conv_2d(inception_3b_pool, 64, filter_size=1, activation='relu', name='inception_3b_pool_1_1')
    inception_3b_output = merge([inception_3b_1_1, inception_3b_3_3, inception_3b_5_5, inception_3b_pool_1_1], mode='concat', axis=3, name='inception_3b_output')

    pool3_3_3 = max_pool_2d(inception_3b_output, kernel_size=3, strides=2, name='pool3_3_3')

    # 4a
    inception_4a_1_1 = conv_2d(pool3_3_3, 192, filter_size=1, activation='relu', name='inception_4a_1_1')
    inception_4a_3_3_reduce = conv_2d(pool3_3_3, 96, filter_size=1, activation='relu', name='inception_4a_3_3_reduce')
    inception_4a_3_3 = conv_2d(inception_4a_3_3_reduce, 208, filter_size=3, activation='relu', name='inception_4a_3_3')
    inception_4a_5_5_reduce = conv_2d(pool3_3_3, 16, filter_size=1, activation='relu', name='inception_4a_5_5_reduce')
    inception_4a_5_5 = conv_2d(inception_4a_5_5_reduce, 48, filter_size=5, activation='relu', name='inception_4a_5_5')
    inception_4a_pool = max_pool_2d(pool3_3_3, kernel_size=3, strides=1, name='inception_4a_pool')
    inception_4a_pool_1_1 = conv_2d(inception_4a_pool, 64, filter_size=1, activation='relu', name='inception_4a_pool_1_1')
    inception_4a_output = merge([inception_4a_1_1, inception_4a_3_3, inception_4a_5_5, inception_4a_pool_1_1], mode='concat', axis=3, name='inception_4a_output')

    # 4b
    # BUG FIX: this layer was named 'inception_4a_1_1' (copy/paste), colliding
    # with the 4a branch above in checkpoints/TensorBoard.
    inception_4b_1_1 = conv_2d(inception_4a_output, 160, filter_size=1, activation='relu', name='inception_4b_1_1')
    inception_4b_3_3_reduce = conv_2d(inception_4a_output, 112, filter_size=1, activation='relu', name='inception_4b_3_3_reduce')
    inception_4b_3_3 = conv_2d(inception_4b_3_3_reduce, 224, filter_size=3, activation='relu', name='inception_4b_3_3')
    inception_4b_5_5_reduce = conv_2d(inception_4a_output, 24, filter_size=1, activation='relu', name='inception_4b_5_5_reduce')
    inception_4b_5_5 = conv_2d(inception_4b_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4b_5_5')
    inception_4b_pool = max_pool_2d(inception_4a_output, kernel_size=3, strides=1, name='inception_4b_pool')
    inception_4b_pool_1_1 = conv_2d(inception_4b_pool, 64, filter_size=1, activation='relu', name='inception_4b_pool_1_1')
    inception_4b_output = merge([inception_4b_1_1, inception_4b_3_3, inception_4b_5_5, inception_4b_pool_1_1], mode='concat', axis=3, name='inception_4b_output')

    # 4c
    inception_4c_1_1 = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_1_1')
    inception_4c_3_3_reduce = conv_2d(inception_4b_output, 128, filter_size=1, activation='relu', name='inception_4c_3_3_reduce')
    inception_4c_3_3 = conv_2d(inception_4c_3_3_reduce, 256, filter_size=3, activation='relu', name='inception_4c_3_3')
    inception_4c_5_5_reduce = conv_2d(inception_4b_output, 24, filter_size=1, activation='relu', name='inception_4c_5_5_reduce')
    inception_4c_5_5 = conv_2d(inception_4c_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4c_5_5')
    inception_4c_pool = max_pool_2d(inception_4b_output, kernel_size=3, strides=1)
    inception_4c_pool_1_1 = conv_2d(inception_4c_pool, 64, filter_size=1, activation='relu', name='inception_4c_pool_1_1')
    inception_4c_output = merge([inception_4c_1_1, inception_4c_3_3, inception_4c_5_5, inception_4c_pool_1_1], mode='concat', axis=3, name='inception_4c_output')

    # 4d
    inception_4d_1_1 = conv_2d(inception_4c_output, 112, filter_size=1, activation='relu', name='inception_4d_1_1')
    inception_4d_3_3_reduce = conv_2d(inception_4c_output, 144, filter_size=1, activation='relu', name='inception_4d_3_3_reduce')
    inception_4d_3_3 = conv_2d(inception_4d_3_3_reduce, 288, filter_size=3, activation='relu', name='inception_4d_3_3')
    inception_4d_5_5_reduce = conv_2d(inception_4c_output, 32, filter_size=1, activation='relu', name='inception_4d_5_5_reduce')
    inception_4d_5_5 = conv_2d(inception_4d_5_5_reduce, 64, filter_size=5, activation='relu', name='inception_4d_5_5')
    inception_4d_pool = max_pool_2d(inception_4c_output, kernel_size=3, strides=1, name='inception_4d_pool')
    inception_4d_pool_1_1 = conv_2d(inception_4d_pool, 64, filter_size=1, activation='relu', name='inception_4d_pool_1_1')
    inception_4d_output = merge([inception_4d_1_1, inception_4d_3_3, inception_4d_5_5, inception_4d_pool_1_1], mode='concat', axis=3, name='inception_4d_output')

    # 4e
    inception_4e_1_1 = conv_2d(inception_4d_output, 256, filter_size=1, activation='relu', name='inception_4e_1_1')
    inception_4e_3_3_reduce = conv_2d(inception_4d_output, 160, filter_size=1, activation='relu', name='inception_4e_3_3_reduce')
    inception_4e_3_3 = conv_2d(inception_4e_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_4e_3_3')
    inception_4e_5_5_reduce = conv_2d(inception_4d_output, 32, filter_size=1, activation='relu', name='inception_4e_5_5_reduce')
    inception_4e_5_5 = conv_2d(inception_4e_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_4e_5_5')
    inception_4e_pool = max_pool_2d(inception_4d_output, kernel_size=3, strides=1, name='inception_4e_pool')
    inception_4e_pool_1_1 = conv_2d(inception_4e_pool, 128, filter_size=1, activation='relu', name='inception_4e_pool_1_1')
    inception_4e_output = merge([inception_4e_1_1, inception_4e_3_3, inception_4e_5_5, inception_4e_pool_1_1], axis=3, mode='concat')

    pool4_3_3 = max_pool_2d(inception_4e_output, kernel_size=3, strides=2, name='pool_3_3')

    # 5a
    inception_5a_1_1 = conv_2d(pool4_3_3, 256, filter_size=1, activation='relu', name='inception_5a_1_1')
    inception_5a_3_3_reduce = conv_2d(pool4_3_3, 160, filter_size=1, activation='relu', name='inception_5a_3_3_reduce')
    inception_5a_3_3 = conv_2d(inception_5a_3_3_reduce, 320, filter_size=3, activation='relu', name='inception_5a_3_3')
    inception_5a_5_5_reduce = conv_2d(pool4_3_3, 32, filter_size=1, activation='relu', name='inception_5a_5_5_reduce')
    inception_5a_5_5 = conv_2d(inception_5a_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5a_5_5')
    inception_5a_pool = max_pool_2d(pool4_3_3, kernel_size=3, strides=1, name='inception_5a_pool')
    inception_5a_pool_1_1 = conv_2d(inception_5a_pool, 128, filter_size=1, activation='relu', name='inception_5a_pool_1_1')
    inception_5a_output = merge([inception_5a_1_1, inception_5a_3_3, inception_5a_5_5, inception_5a_pool_1_1], axis=3, mode='concat')

    # 5b
    inception_5b_1_1 = conv_2d(inception_5a_output, 384, filter_size=1, activation='relu', name='inception_5b_1_1')
    inception_5b_3_3_reduce = conv_2d(inception_5a_output, 192, filter_size=1, activation='relu', name='inception_5b_3_3_reduce')
    inception_5b_3_3 = conv_2d(inception_5b_3_3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3_3')
    inception_5b_5_5_reduce = conv_2d(inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5_5_reduce')
    inception_5b_5_5 = conv_2d(inception_5b_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5_5')
    inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool')
    inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1')
    inception_5b_output = merge([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1], axis=3, mode='concat')

    # fc head
    pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1)
    pool5_7_7 = dropout(pool5_7_7, 0.4)
    loss = fully_connected(pool5_7_7, num_classes, activation='softmax')
    network = regression(loss, optimizer='momentum',
                         loss='categorical_crossentropy',
                         learning_rate=0.001)

    # Train
    model = tflearn.DNN(network, tensorboard_verbose=0, checkpoint_path=checkpoint_path)
    if train:
        start_time = time.time()
        # Collapse the two previously-duplicated fit() calls: they differed
        # only in validation_set.
        validation = 0.0 if Xval is None or Yval is None else (Xval, Yval)
        model.fit(X, Y, n_epoch=num_epochs, validation_set=validation,
                  show_metric=True, run_id='hsi_cnn_model', shuffle=True,
                  batch_size=size_batch)
        print("\n\n-------------train time: %s seconds\n\n" % (time.time() - start_time))
    return model
(X, Y), (X_test, Y_test) = cifar10.load_data() X, Y = shuffle(X, Y) Y = to_categorical(Y, 10) Y_test = to_categorical(Y_test, 10) # Building 'Network In Network' network = input_data(shape=[None, 32, 32, 3]) network = conv_2d(network, 192, 5, activation='relu') network = conv_2d(network, 160, 1, activation='relu') network = conv_2d(network, 96, 1, activation='relu') network = max_pool_2d(network, 3, strides=2) network = dropout(network, 0.5) network = conv_2d(network, 192, 5, activation='relu') network = conv_2d(network, 192, 1, activation='relu') network = conv_2d(network, 192, 1, activation='relu') network = avg_pool_2d(network, 3, strides=2) network = dropout(network, 0.5) network = conv_2d(network, 192, 3, activation='relu') network = conv_2d(network, 192, 1, activation='relu') network = conv_2d(network, 10, 1, activation='relu') network = avg_pool_2d(network, 8) network = flatten(network) network = regression(network, optimizer='adam', loss='categorical_crossentropy', learning_rate=0.001) # Training model = tflearn.DNN(network) model.fit(X, Y, n_epoch=50, shuffle=True, validation_set=(X_test, Y_test), show_metric=True, batch_size=128, run_id='cifar10_net_in_net')