def k_autoencoder4(latent_dim, folder):
    """Build a convolutional autoencoder for 32x32x3 images.

    Args:
        latent_dim: width of the dense bottleneck (the code vector).
        folder: directory where the model summary text file is written.

    Returns:
        (auto_encoder, encoder, decoder) — three Keras Models sharing layers:
        the full autoencoder, the image->code encoder, and a standalone
        code->image decoder rebuilt from the autoencoder's decoder layers.
    """
    inputs = kst.md_input((32, 32, 3))
    # --- Encoder: conv stack 32x32x3 -> 8x8x256 ---
    net = kst.conv2d(inputs, 64, (3, 3))  # 32,32,64
    net = kst.activation(net, "relu")
    net = kst.conv2d(net, 64, (3, 3))  # 32,32,64
    net = kst.activation(net, "relu")
    net = kst.maxpool2d(net)  # 16,16,64
    net = kst.conv2d(net, 128, (3, 3))  # 16,16,128
    net = kst.activation(net, "relu")
    net = kst.conv2d(net, 128, (3, 3))  # 16,16,128
    net = kst.activation(net, "relu")
    net = kst.maxpool2d(net)  # 8,8,128
    net = kst.conv2d(net, 256, (3, 3))  # 8,8,256
    net = kst.activation(net, "relu")
    # NOTE(review): this conv has no activation before flatten — possibly
    # intentional (linear pre-bottleneck), but confirm it is not an omission.
    net = kst.conv2d(net, 256, (3, 3))
    net = kst.flatten(net)  # 8*8*256
    net = kst.dense(net, 2048)
    net = kst.activation(net, "relu")
    # Bottleneck: `encode` is the latent code tensor.
    net = kst.dense(net, latent_dim)
    encode = kst.activation(net, "relu")
    # --- Decoder: dense expansion back to 8x8x256, then deconv stack ---
    # layers[19] is this first decoder dense layer; the index is relied on
    # below when rebuilding the standalone decoder — keep layer count in sync.
    net = kst.dense(encode, 2048)  # layer index 19
    net = kst.activation(net, "relu")
    net = kst.dense(net, 8 * 8 * 256)
    net = kst.activation(net, "relu")
    net = kst.reshape(net, (8, 8, 256))  # 8,8,256
    # net = kst.deconv2d(net, 512, (3, 3))
    # net = kst.activation(net, "relu")
    # net = kst.deconv2d(net, 512, (3, 3))
    # net = kst.upsampling(net, 2)
    net = kst.deconv2d(net, 256, (3, 3))  # 8,8,256
    net = kst.activation(net, "relu")
    # NOTE(review): no activation here either — mirrors the encoder's
    # pre-flatten conv; confirm intentional.
    net = kst.deconv2d(net, 256, (3, 3))
    net = kst.upsampling(net, 2)  # 16,16,256
    net = kst.deconv2d(net, 128, (3, 3))  # 16,16,128
    net = kst.activation(net, "relu")
    net = kst.deconv2d(net, 128, (3, 3))
    net = kst.activation(net, "relu")
    net = kst.upsampling(net, 2)  # 32,32,128
    net = kst.deconv2d(net, 64, (3, 3))  # 32,32,64
    net = kst.activation(net, "relu")
    net = kst.deconv2d(net, 64, (3, 3))
    net = kst.activation(net, "relu")
    net = kst.deconv2d(net, 3, (3, 3))  # 32,32,3
    # Sigmoid output: reconstructed image in [0, 1].
    decode = kst.activation(net, "sigmoid")
    auto_encoder = Model(inputs, decode)
    net_struct = kst.model_summary(auto_encoder, print_out=True, save_dir=folder + "/model_summary.txt")
    encoder = Model(inputs, encode)
    # Rebuild a standalone decoder by feeding a fresh latent input through
    # the autoencoder's decoder layers (indices 19 onward — see note above).
    encoded_input = kst.md_input(shape=(latent_dim, ))
    decoding = auto_encoder.layers[19](encoded_input)
    for layer in auto_encoder.layers[20:]:
        decoding = layer(decoding)
    decoder = Model(encoded_input, decoding)
    return auto_encoder, encoder, decoder
# NOTE(review): this chunk begins mid-statement — the opening of the data
# augmentation generator constructor (presumably ImageDataGenerator) is
# outside this view.
horizontal_flip=True,
    validation_split=validate_rate )
# Split the augmented training data into training / validation streams
# using the generator's subset mechanism.
train_generator = datagen.flow(
    x_train, y_train, batch_size=bt_size, subset='training' )
validate_generator = datagen.flow(
    x_train, y_train, batch_size=bt_size, subset='validation' )
# Clear any previous Keras session/graph before building a new model.
K.clear_session()
# Model input: 48x48 single-channel images.
inputs = kst.md_input(shape=(48, 48, 1))
# Block 1: two conv(64) -> batch-norm -> LeakyReLU stages, then downsample.
net = kst.conv2d(inputs, 64, (3, 3))
net = kst.batch_norm(net)
net = kst.activation(net, "LeakyReLU")
net = kst.conv2d(net, 64, (3, 3))
net = kst.batch_norm(net)
net = kst.activation(net, "LeakyReLU")
net = kst.maxpool2d(net)
# Block 2: two conv(128) -> batch-norm -> LeakyReLU stages, then downsample.
net = kst.conv2d(net, 128, (3, 3))
net = kst.batch_norm(net)
net = kst.activation(net, "LeakyReLU")
net = kst.conv2d(net, 128, (3, 3))
net = kst.batch_norm(net)
net = kst.activation(net, "LeakyReLU")
net = kst.maxpool2d(net)
# NOTE(review): chunk is truncated here — the conv(256) block continues
# outside this view.
net = kst.conv2d(net, 256, (3, 3))
# Initializer bookkeeping: [name-for-logging, raw value, Keras initializer].
# NOTE(review): the conv/dense calls below pass bias_init=b_init[1] (the
# plain float 0.01) while kernel_init uses w_init[2] (the initializer
# object). If kst.conv2d expects a Keras initializer, b_init[2]
# (Constant(0.01)) was probably intended — confirm against kst's API.
b_init = ['Constant', 0.01, Constant(0.01)]
w_init = ['he_normal', 'he_normal', 'he_normal']
# Collect run hyperparameters for logging.
param = {}
param['Epochs'] = epochs
param['Validate_Rate'] = validate_rate
param['Batch_size'] = bt_size
param['Learning_Rate'] = learn_rate
param['Weight_initializer'] = w_init[0] + " " + w_init[1]
param['Bias_initializer'] = b_init[0] + " " + str(b_init[1])
############################################################
# Build model:
############################################################
# Clear any previous Keras session/graph before building a new model.
K.clear_session()
inputs = kst.md_input(shape=img_shape)
# Stage 1: conv(64) -> batch-norm -> relu -> downsample.
net = kst.conv2d(inputs, 64, (3, 3), kernel_init=w_init[2], bias_init=b_init[1])
net = kst.batch_norm(net)
net = kst.activation(net, "relu")
net = kst.maxpool2d(net)
# Stage 2: conv(128) -> batch-norm -> relu -> downsample.
net = kst.conv2d(net, 128, (3, 3), kernel_init=w_init[2], bias_init=b_init[1])
net = kst.batch_norm(net)
net = kst.activation(net, "relu")
net = kst.maxpool2d(net)
# Stage 3: conv(256) -> batch-norm -> relu -> downsample.
net = kst.conv2d(net, 256, (3, 3), kernel_init=w_init[2], bias_init=b_init[1])
net = kst.batch_norm(net)
net = kst.activation(net, "relu")
net = kst.maxpool2d(net)
# Classifier head (truncated — continues outside this view).
net = kst.flatten(net)
net = kst.dense(net, 512, kernel_init=w_init[2], bias_init=b_init[1])
net = kst.batch_norm(net)
# Extract training/validation loss curves from the fit history
# (`hist` is produced by an earlier model.fit call outside this view).
loss_train = hist.history['loss']
loss_valid = hist.history['val_loss']
# In[]
# Load trained model
# NOTE(review): non-raw string with backslashes — "\h" and "\D" are invalid
# escape sequences (left literal by CPython but deprecated) and the path is
# Windows-only; prefer a raw string or forward slashes.
project = "logs\hw9\D20200625T1712"
latent_dim = 256
auto_encoder = load_model(project + "/best_model.h5")
# inputs = knt.md_input((32, 32, 3))
inputs = auto_encoder.input
# Rebuild the encoder from layers 1..23 of the loaded autoencoder.
# NOTE(review): indices 24/25 below are hard-coded to this checkpoint's
# architecture — they break if the saved model's layer count changes.
encoding = auto_encoder.layers[1](inputs)
for layer in auto_encoder.layers[2:24]:
    encoding = layer(encoding)
encoder = Model(inputs, encoding)
# Rebuild a standalone decoder by feeding a fresh latent input through
# the remaining (decoder) layers.
encoded_input = knt.md_input(shape=(latent_dim,))
decoding = auto_encoder.layers[24](encoded_input)
for layer in auto_encoder.layers[25:]:
    decoding = layer(decoding)
decoder = Model(encoded_input, decoding)
# In[]
# Training history: plot training vs validation loss per epoch.
fig2 = plt.figure()
plt.rcParams.update({'font.size': 14})
ax3 = fig2.add_subplot(1, 1, 1)
ax3.plot(range(epoch), loss_train, label="Training")
ax3.plot(range(epoch), loss_valid, label="Validation")
ax3.legend()
ax3.set_xlabel("epoch")
def _conv_bn_relu(x, filters, kernel, **conv_kwargs):
    """One conv2d -> batch_norm -> relu step.

    Extra keyword args (stride, padding) are forwarded to kst.conv2d
    unchanged, so omitted args keep kst's own defaults exactly as the
    original inline code did.
    """
    x = kst.conv2d(x, filters, kernel, **conv_kwargs)
    x = kst.batch_norm(x)
    return kst.activation(x, "relu")


def _inception_a(net, pool_filters):
    """Inception-A block (mixed 5b-5d): 1x1 / 5x5 / double-3x3 / pool branches.

    `pool_filters` is the 1x1 width on the pooled branch (32 for mixed 5b,
    64 for 5c/5d); all other widths are fixed.
    """
    branch0 = _conv_bn_relu(net, 64, (1, 1))
    branch1 = _conv_bn_relu(net, 48, (1, 1))
    branch1 = _conv_bn_relu(branch1, 64, (5, 5))
    branch2 = _conv_bn_relu(net, 64, (1, 1))
    branch2 = _conv_bn_relu(branch2, 96, (3, 3))
    branch2 = _conv_bn_relu(branch2, 96, (3, 3))
    branch3 = kst.avgpool2d(net, (3, 3))
    branch3 = _conv_bn_relu(branch3, pool_filters, (1, 1))
    return kst.concat([branch0, branch1, branch2, branch3], axis=3)


def _inception_c(net, mid):
    """Inception-C block (mixed 6b-6e): factorized 7x7 convolutions.

    `mid` is the bottleneck width (128 for 6b, 160 for 6c/6d, 192 for 6e);
    every branch ends at 192 channels.
    """
    branch0 = _conv_bn_relu(net, 192, (1, 1))
    branch1 = _conv_bn_relu(net, mid, (1, 1))
    branch1 = _conv_bn_relu(branch1, mid, (1, 7))
    branch1 = _conv_bn_relu(branch1, 192, (7, 1))
    branch2 = _conv_bn_relu(net, mid, (1, 1))
    branch2 = _conv_bn_relu(branch2, mid, (7, 1))
    branch2 = _conv_bn_relu(branch2, mid, (1, 7))
    branch2 = _conv_bn_relu(branch2, mid, (7, 1))
    branch2 = _conv_bn_relu(branch2, 192, (1, 7))
    branch3 = kst.avgpool2d(net, (3, 3))
    branch3 = _conv_bn_relu(branch3, 192, (1, 1))
    return kst.concat([branch0, branch1, branch2, branch3], axis=3)


def _inception_e(net):
    """Inception-E block (mixed 7b/7c): expanded filter-bank branches."""
    branch0 = _conv_bn_relu(net, 320, (1, 1))
    # 1x1 -> parallel (1x3) and (3x1) heads, concatenated.
    branch1 = _conv_bn_relu(net, 384, (1, 1))
    branch1a = _conv_bn_relu(branch1, 384, (1, 3))
    branch1b = _conv_bn_relu(branch1, 384, (3, 1))
    branch1 = kst.concat([branch1a, branch1b], axis=3)
    # 1x1 -> 3x3 -> parallel (1x3) and (3x1) heads, concatenated.
    branch2 = _conv_bn_relu(net, 448, (1, 1))
    branch2 = _conv_bn_relu(branch2, 384, (3, 3))
    branch2a = _conv_bn_relu(branch2, 384, (1, 3))
    # BUG FIX: the original fed branch1 into this head, silently dropping
    # the 448->384 3x3 path computed just above. Both split heads must
    # come from branch2 (as in the reference Inception-v3).
    branch2b = _conv_bn_relu(branch2, 384, (3, 1))
    branch2 = kst.concat([branch2a, branch2b], axis=3)
    branch3 = kst.avgpool2d(net, (3, 3))
    branch3 = _conv_bn_relu(branch3, 192, (1, 1))
    return kst.concat([branch0, branch1, branch2, branch3], axis=3)


def k_inception_v3(n_class, dp):
    """Build an Inception-v3 classifier for 299x299 single-channel input.

    Args:
        n_class: number of output classes.
        dp: dropout rate applied before the main logits.

    Returns:
        A Keras Model with two softmax outputs: [predict, aux_predict]
        (main head and the auxiliary classifier branching off mixed 6e).

    Fixes vs. the previous version:
      * mixed 6a branch0 used 348 filters — transposition typo for 384.
      * mixed 7b/7c built branch2b from branch1 instead of branch2.
    """
    inputs = kst.md_input((299, 299, 1))
    # --- Stem ---
    net = _conv_bn_relu(inputs, 32, (3, 3), stride=2, padding="VALID")
    net = _conv_bn_relu(net, 32, (3, 3), padding="VALID")
    net = _conv_bn_relu(net, 64, (3, 3), padding="SAME")
    net = kst.maxpool2d(net, kernel_size=(3, 3), stride=2, padding="VALID")
    net = _conv_bn_relu(net, 80, (1, 1), padding="VALID")
    net = _conv_bn_relu(net, 192, (3, 3), padding="VALID")
    # NOTE(review): reference Inception-v3 strides this pool by 2; here the
    # stride is left to kst.maxpool2d's default — confirm it matches.
    net = kst.maxpool2d(net, (3, 3), padding="VALID")
    # --- Inception-A blocks ---
    net = _inception_a(net, 32)   # mixed 5b
    net = _inception_a(net, 64)   # mixed 5c
    net = _inception_a(net, 64)   # mixed 5d
    # --- mixed 6a: grid-size reduction ---
    # BUG FIX: 348 -> 384 (filter-count typo vs. reference architecture).
    branch0 = _conv_bn_relu(net, 384, (3, 3), stride=2, padding="VALID")
    branch1 = _conv_bn_relu(net, 64, (1, 1))
    branch1 = _conv_bn_relu(branch1, 96, (3, 3))
    branch1 = _conv_bn_relu(branch1, 96, (3, 3), stride=2, padding="VALID")
    branch2 = kst.maxpool2d(net, (3, 3), padding="VALID")
    net = kst.concat([branch0, branch1, branch2], axis=3)
    # --- Inception-C blocks ---
    net = _inception_c(net, 128)  # mixed 6b
    net = _inception_c(net, 160)  # mixed 6c
    net = _inception_c(net, 160)  # mixed 6d
    net = _inception_c(net, 192)  # mixed 6e
    # Auxiliary classifier branches off the mixed 6e features.
    aux_logits = net
    # --- mixed 7a: grid-size reduction ---
    branch0 = _conv_bn_relu(net, 192, (1, 1))
    branch0 = _conv_bn_relu(branch0, 320, (3, 3), stride=2, padding="VALID")
    branch1 = _conv_bn_relu(net, 192, (1, 1))
    branch1 = _conv_bn_relu(branch1, 192, (1, 7))
    branch1 = _conv_bn_relu(branch1, 192, (7, 1))
    branch1 = _conv_bn_relu(branch1, 192, (3, 3), stride=2, padding="VALID")
    branch2 = kst.maxpool2d(net, (3, 3), padding="VALID")
    net = kst.concat([branch0, branch1, branch2], axis=3)
    # --- Inception-E blocks ---
    net = _inception_e(net)  # mixed 7b
    net = _inception_e(net)  # mixed 7c
    # --- Auxiliary head ---
    aux_logits = kst.avgpool2d(aux_logits, (5, 5), stride=3, padding="VALID")
    aux_logits = _conv_bn_relu(aux_logits, 128, (1, 1))
    aux_logits = _conv_bn_relu(aux_logits, 768, (5, 5), padding="VALID")
    aux_logits = kst.conv2d(aux_logits, n_class, (1, 1))
    aux_logits = kst.flatten(aux_logits)
    aux_predict = kst.activation(aux_logits, "softmax")
    # --- Main head ---
    net = kst.avgpool2d(net, (8, 8), padding="VALID")
    net = kst.dropout(net, dp)
    logits = kst.conv2d(net, n_class, (1, 1))
    logits = kst.flatten(logits)
    predict = kst.activation(logits, "softmax")
    inception_v3 = Model(inputs, [predict, aux_predict])
    kst.model_summary(inception_v3, print_out=True)
    return inception_v3