def test_no_effect_on_layers(self):
    """Calling setup_trainable_layers with no layer count must leave
    every layer's ``trainable`` flag exactly as it was."""
    utils.setup_trainable_layers(self.model)
    # Snapshot each layer's trainable flag after the call.
    flags_after = [layer.trainable for layer in self.model.layers]
    # Must match the flags recorded before the call (self.layers_bool).
    self.assertListEqual(self.layers_bool, flags_after)
def test_no_layers_are_freezing_when_number_of_specified_layers_is_0(self):
    """Freezing zero layers must leave every layer trainable."""
    utils.setup_trainable_layers(self.model, 0)
    # Snapshot each layer's trainable flag after the call.
    flags_after = [layer.trainable for layer in self.model.layers]
    # Every flag should still be True — nothing was frozen.
    self.assertTrue(all(flags_after))
def test_layers_after_specified_number_of_layers_are_not_freezed(self):
    """Layers at or beyond the freeze index must stay trainable."""
    nb_layers = len(self.model.layers) // 2
    utils.setup_trainable_layers(self.model, nb_layers)
    # Snapshot each layer's trainable flag after the call.
    flags_after = [layer.trainable for layer in self.model.layers]
    # Layers from index nb_layers onward must remain trainable (True).
    self.assertTrue(all(flags_after[nb_layers:]))
def test_first_specified_number_of_layers_are_freezed(self):
    """The first ``nb_layers`` layers must all be frozen."""
    nb_layers = len(self.model.layers) // 2
    utils.setup_trainable_layers(self.model, nb_layers)
    # Snapshot each layer's trainable flag after the call.
    flags_after = [layer.trainable for layer in self.model.layers]
    # Every layer before the freeze index must have trainable == False.
    self.assertListEqual(flags_after[:nb_layers], [False] * nb_layers)
def test_all_layers_are_freezing_when_number_of_layers_is_same_as_model_length(
        self):
    """Passing the full layer count must freeze every layer."""
    nb_layers = len(self.model.layers)
    utils.setup_trainable_layers(self.model, nb_layers)
    # Snapshot each layer's trainable flag after the call.
    flags_after = [layer.trainable for layer in self.model.layers]
    # All layers must now be frozen (trainable == False).
    self.assertListEqual(flags_after, [False] * nb_layers)
def build_finetuned_model(args, input_shape, fc_size):
    """
    Builds a finetuned VGG model from VGGFace implementation with no weights
    loaded and setting up new fresh prediction layers at last

    Args:
        args: necessary args needed for training like train_data_dir,
            batch_size etc...
        input_shape: shape of input tensor
        fc_size: number of nodes to be used in last layers will be based on
            this value i.e its multiples may be used

    Returns:
        finetuned vgg model
    """
    # setup model
    vgg, base_vgg = VGGWithCustomLayers(args.nb_classes, input_shape, fc_size)
    # setup layers to be trained or not
    setup_trainable_layers(vgg, args.layers_to_freeze)
    # compiling the model
    vgg.compile(optimizer=SGD(lr=0.0001, momentum=0.9),
                loss='categorical_crossentropy',
                metrics=['accuracy'])
    # BUG FIX: original returned `vg`, an undefined name that raised
    # NameError on every call; the compiled model is `vgg`.
    return vgg
def build_finetuned_model(args, input_shape, fc_size):
    """
    Builds a finetuned InceptionV3 model from tensorflow implementation with
    imagenet weights loaded and setting up new fresh prediction layers at last

    Args:
        args: necessary args needed for training like train_data_dir,
            batch_size etc...
        input_shape: shape of input tensor
        fc_size: number of nodes to be used in last layers will be based on
            this value i.e its multiples may be used

    Returns:
        finetuned inceptionV3 model
    """
    # Build the base network plus fresh classification head.
    model, base_model = InceptionV3WithCustomLayers(args.nb_classes,
                                                    input_shape,
                                                    fc_size)
    # Freeze the requested number of leading layers.
    setup_trainable_layers(model, args.layers_to_freeze)
    # Compile for categorical classification.
    model.compile(optimizer='RMSprop',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model
def test_raises_valueError_when_number_of_layers_specified_is_negative(
        self):
    """A negative freeze count must raise ValueError."""
    # Removed the unused `as ve` capture — the exception object was
    # never inspected.
    with self.assertRaises(ValueError):
        utils.setup_trainable_layers(self.model, -1)
def test_raises_valueError_when_number_of_layers_specified_is_greater_than_available_layers(
        self):
    """A freeze count larger than the model depth must raise ValueError."""
    nb_layers = len(self.model.layers) + 1
    # Removed the unused `as ve` capture — the exception object was
    # never inspected.
    with self.assertRaises(ValueError):
        utils.setup_trainable_layers(self.model, nb_layers)