def get_model(class_names, base_weights=None, weights_path=None,
              image_dimension=224, color_mode='grayscale',
              weight_decay=1e-4, class_mode='multiclass',
              final_activation="softmax"):
    """Create a DenseNet121-based model for transfer learning.

    Arguments:
        class_names - list of str, one entry per output class
        base_weights - weights spec for the DenseNet121 backbone
            (e.g. "imagenet"); ignored when weights_path is given
        weights_path - str path to full-model weights to load, or None
        image_dimension - int, expected input size
            (currently unused in this function; kept for interface
            compatibility — presumably consumed by callers/generators)
        color_mode - str, expected input color mode
            (currently unused in this function; kept for interface
            compatibility)
        weight_decay - float, L2 regularization factor for the dense heads
        class_mode - 'multiclass' (single softmax head over all classes)
            or 'multibinary' (one independent sigmoid head per class)
        final_activation - activation of the multiclass output layer

    Returns:
        model - Keras Model; its backbone is attached as model.base_model
    """
    if weights_path is not None:
        # Full-model weights are loaded below, so do not also download
        # or load backbone-only weights.
        base_weights = None

    # BUGFIX: pooling must be the None object, not the string "None".
    # The string is not a documented value for keras applications; it only
    # behaved like "no pooling" by accident in older Keras versions.
    base_model = DenseNet121(include_top=False,
                             weights=base_weights,
                             pooling=None)
    x = base_model.output
    x = GlobalAveragePooling2D()(x)

    # Classification head(s) on top of the pooled backbone features.
    predictions = []
    if class_mode == 'multiclass':
        print(f"** Final layer activation function is {final_activation}")
        # NOTE(review): this hidden layer has no activation (linear Dense);
        # confirm that omitting e.g. ReLU here is intentional.
        hidden = Dense(4096,
                       kernel_regularizer=regularizers.l2(weight_decay),
                       name="fc_hidden_layer1")(x)
        predictions = Dense(
            len(class_names),
            activation=final_activation,
            name="fc_output_layer",
            kernel_regularizer=regularizers.l2(weight_decay))(hidden)
    elif class_mode == 'multibinary':
        # One independent sigmoid output per class, each with its own
        # hidden layer, named after the class for readable metrics.
        for class_name in class_names:
            hidden = Dense(
                1024,
                kernel_regularizer=regularizers.l2(weight_decay))(x)
            output = Dense(
                1,
                kernel_regularizer=regularizers.l2(weight_decay),
                activation="sigmoid",
                name=class_name)(hidden)
            predictions.append(output)

    model = Model(inputs=base_model.input, outputs=predictions)
    if weights_path is not None:
        model.load_weights(weights_path)
    model.base_model = base_model
    return model
# Training loop: fit the captioning model one epoch at a time, score BLEU
# against the validation references, and checkpoint the best model so far.
max_score = 0
epoch = 200
for i in range(epoch):  # BUGFIX: xrange is Python 2 only
    # NOTE(review): nb_epoch is the legacy Keras 1 keyword (newer Keras
    # expects epochs=) — confirm against the installed version.
    model.fit([codes, partial_captions], next_words,
              batch_size=128, nb_epoch=1,
              validation_data=([codesT, partial_captionsT], next_wordsT))
    reference_file = "ref_UNK.txt"
    predictions = []
    # Build "id<TAB>sentence" prediction lines; sens[j][0][1:-1] strips the
    # start/end tokens before rendering the sentence.
    for j, sent_id in enumerate(ids):  # renamed: 'id' shadows the builtin
        predictions.append(
            str(sent_id) + "\t" + gen.printSentence(sens[j][0][1:-1]) + '\n')
    # NOTE(review): 'gen' is deleted every iteration — unless it is
    # recreated elsewhere between epochs, iteration 2 raises NameError.
    del gen
    (goldMap, predictionMap) = computeMaps(predictions, reference_file)
    score = bleuFromMaps(goldMap, predictionMap)[0]
    # BUGFIX: Python 2 print statements -> print() calls (the file's other
    # code uses f-strings, so it must run under Python 3).
    print('eter' + str(i))
    print(score)
    # Checkpoint only after a few warm-up epochs and only on improvement.
    if max_score < score and i > 5:
        max_score = score
        json_string = model.to_json()
        # BUGFIX: use a context manager so the file handle is closed.
        with open('../model/codenn_512.json', 'w') as f:
            f.write(json_string)
        model.save_weights('../model/codenn_512.h5')