import unittest
import pathlib

import numpy as np
import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras.applications import NASNetMobile
from tensorflow.keras.preprocessing.image import ImageDataGenerator


class Test(unittest.TestCase):
	def setUp(self):
		import Activity08_01
		self.exercises = Activity08_01

		self.file_url = 'https://github.com/PacktWorkshops/The-TensorFlow-Workshop/blob/master/Chapter08/dataset/fruits360.zip'
		self.zip_dir = tf.keras.utils.get_file('fruits360.zip', origin=self.file_url, extract=True)
		self.path = pathlib.Path(self.zip_dir).parent / 'fruits360_filtered'
		self.train_dir = self.path / 'Training'
		self.validation_dir = self.path / 'Test'
		self.total_train = 11398
		self.total_val = 4752

		self.train_image_generator = ImageDataGenerator(rescale=1./255, rotation_range=40, width_shift_range=0.1, height_shift_range=0.1, shear_range=0.2, zoom_range=0.2, horizontal_flip=True, fill_mode='nearest')
		self.validation_image_generator = ImageDataGenerator(rescale=1./255)
		self.batch_size = 32
		self.img_height = 224
		self.img_width = 224
		self.channel = 3
		self.train_data_gen = self.train_image_generator.flow_from_directory(batch_size=self.batch_size, directory=self.train_dir, target_size=(self.img_height, self.img_width))
		self.val_data_gen = self.validation_image_generator.flow_from_directory(batch_size=self.batch_size, directory=self.validation_dir, target_size=(self.img_height, self.img_width))

		np.random.seed(8)
		tf.random.set_seed(8)

		self.base_model = NASNetMobile(include_top=False, input_shape=(self.img_height, self.img_width, self.channel), weights='imagenet')

		for layer in self.base_model.layers[:700]:
			layer.trainable = False

		self.model = tf.keras.Sequential([
			self.base_model,
			layers.Flatten(),
			layers.Dense(500, activation='relu'),
			layers.Dense(120, activation='softmax')
		])
		self.model.compile(loss='categorical_crossentropy', optimizer=tf.keras.optimizers.Adam(0.001), metrics=['accuracy'])

	def test_file_url(self):
		self.assertEqual(self.exercises.file_url, self.file_url)

	def test_total_train(self):
		self.assertEqual(self.exercises.total_train, self.total_train)

	def test_total_val(self):
		self.assertEqual(self.exercises.total_val, self.total_val)

	def test_base_model_summary(self):
		self.assertEqual(self.exercises.base_model.summary(), self.base_model.summary())

	def test_model_summary(self):
		self.assertEqual(self.exercises.model.summary(), self.model.summary())
Example 2
def build_model(self):
    # `config` and `nasnet` are module-level settings defined elsewhere in the source project
    conv_base = NASNetMobile(include_top=False,
                             weights='imagenet',
                             input_shape=config.input_shape)
    conv_base.trainable = False
    conv_base.summary()  # summary() already prints, so wrapping it in print() only adds "None"
    inputs = Input(shape=config.input_shape)
    x = conv_base(inputs)
    x = Flatten()(x)
    x = Dense(units=256, activation=nasnet['activation'])(x)
    outputs = Dense(units=config.num_classes,
                    activation=nasnet['outact'])(x)
    self.model = Model(inputs=inputs, outputs=outputs)
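
The method above references a config object and a nasnet settings dict that are not shown in the snippet. A minimal sketch of stand-ins that would make it runnable (the values below are assumptions, chosen only to match NASNetMobile's default 224x224x3 input):

from types import SimpleNamespace

# hypothetical stand-ins for the external settings used by build_model
config = SimpleNamespace(input_shape=(224, 224, 3), num_classes=10)
nasnet = {'activation': 'relu', 'outact': 'softmax'}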
Example 3
from tensorflow.keras.applications import NASNetMobile
from tensorflow.keras.optimizers import Adadelta
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint

aaa = 1  # channel dimension: reshape the arrays to (samples, height, width, 1)
x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], x_train.shape[2], aaa)
x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], aaa)
print(x_train.shape, y_train.shape) # (3628, 128, 862, 1) (3628,)
print(x_test.shape, y_test.shape)   # (908, 128, 862, 1) (908,)

model = NASNetMobile(
    include_top=True,
    input_shape=(128,862,1),
    classes=2,
    pooling=None,
    weights=None,
)

model.summary()
# model.trainable = False

model.save('C:/nmb/nmb_data/h5/5s/Nasnet/nasnet_adadelta_1.h5')  # initial save; the ModelCheckpoint below overwrites this file with the best weights

# compile and train
op = Adadelta(learning_rate=1e-3)
batch_size = 4

es = EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True, verbose=1)
lr = ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=10, verbose=1)
path = 'C:/nmb/nmb_data/h5/5s/Nasnet/nasnet_adadelta_1.h5'
mc = ModelCheckpoint(path, monitor='val_loss', verbose=1, save_best_only=True)

model.compile(optimizer=op, loss="sparse_categorical_crossentropy", metrics=['acc'])
history = model.fit(x_train, y_train, epochs=1000, batch_size=batch_size, validation_split=0.2, callbacks=[es, lr, mc])
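
Because ModelCheckpoint writes the best weights to path and EarlyStopping restores the best weights in memory, a natural follow-up (sketched here, not part of the original snippet) is to reload the checkpoint and score it on the held-out test split:

from tensorflow.keras.models import load_model

# reload the best checkpoint saved by ModelCheckpoint and evaluate it
best_model = load_model(path)
loss, acc = best_model.evaluate(x_test, y_test, batch_size=batch_size)
print('test loss:', loss, 'test accuracy:', acc)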
Example 4
from tensorflow.keras.datasets import cifar10
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, UpSampling2D
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau
from tensorflow.keras.applications import NASNetMobile
from tensorflow.keras.utils import to_categorical

(x_train, y_train), (x_test, y_test) = cifar10.load_data()

y_train = to_categorical(y_train)
y_test = to_categorical(y_test)

print(x_train.shape, x_test.shape)
print(y_train.shape, y_test.shape)

nasnetmobile = NASNetMobile(weights='imagenet',
                            include_top=False,
                            input_shape=(224, 224, 3))

nasnetmobile.summary()

nasnetmobile.trainable = False

model = Sequential()
model.add(UpSampling2D(size=(7, 7)))  # upsample 32x32 CIFAR-10 images to the 224x224 input NASNetMobile expects
model.add(nasnetmobile)
model.add(Flatten())
model.add(Dense(256))
model.add(Dense(64))
model.add(Dense(10, activation='softmax'))

# model.summary()

model.compile(loss='categorical_crossentropy',
              optimizer='adam')
Example 5

from tensorflow.keras.layers import Input, AveragePooling2D, Flatten, Dense, Dropout
from tensorflow.keras.applications import NASNetMobile

# test_dataset = train.flow_from_directory('C:/Users/param/Face-Mask-Detection/dataset/test',
# target_size=(224, 224),
# batch_size=32,
# class_mode='binary')
# train_dataset.class_indices

# local_weights_file = 'C:/Users/param/Face-Mask-Detection/face_detector/inceptionv3-model-10ep.h5'
pre_trained_model = NASNetMobile(weights="imagenet",
                                 include_top=False,
                                 input_tensor=Input(shape=(224, 224, 3)))

# pre_trained_model.load_weights(local_weights_file)
#

#
pre_trained_model.summary()

# NOTE: 'mixed7' is an InceptionV3 layer name (see the commented-out weights file above)
# and does not exist in NASNetMobile, so take the base model's final feature map instead
last_output = pre_trained_model.output
# print('last layer output shape: ', last_output.shape)
# construct the head of the model that will be placed on top of the
# the base model
# headModel = pre_trained_model.output
# headModel = AveragePooling2D(pool_size=(5, 5))(last_output)
# headModel = BatchNormalization(axis=1)(last_output)
# headModel = Conv2D(32, (3, 3), padding="same", activation="relu")(headModel)
headModel = AveragePooling2D(pool_size=(5, 5))(last_output)
headModel = Flatten(name="flatten")(headModel)
# headModel = Dropout(0.5)(headModel)
headModel = Dense(512, activation="relu")(headModel)
headModel = Dropout(0.5)(headModel)
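
The snippet stops after building the head. A minimal sketch of how the head might be attached to the frozen base and compiled for the two-class mask/no-mask task (the output size, freezing choice, and optimizer settings are assumptions, not part of the original code):

from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam

headModel = Dense(2, activation="softmax")(headModel)  # assumed: mask / no-mask classes

# freeze the NASNetMobile base so only the new head is trained
for layer in pre_trained_model.layers:
    layer.trainable = False

model = Model(inputs=pre_trained_model.input, outputs=headModel)
model.compile(loss="categorical_crossentropy",
              optimizer=Adam(learning_rate=1e-4),  # assumed learning rate
              metrics=["accuracy"])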
Example 6
import unittest
import pathlib

import numpy as np
import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras.applications import NASNetMobile
from tensorflow.keras.preprocessing.image import ImageDataGenerator


class Test(unittest.TestCase):
    def setUp(self):
        import Exercise08_01
        self.exercises = Exercise08_01

        self.file_url = 'https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip'
        self.zip_dir = tf.keras.utils.get_file('cats_and_dogs.zip',
                                               origin=self.file_url,
                                               extract=True)
        self.path = pathlib.Path(
            self.zip_dir).parent / 'cats_and_dogs_filtered'
        self.train_dir = self.path / 'train'
        self.validation_dir = self.path / 'validation'
        self.total_train = 2000
        self.total_val = 1000

        self.train_image_generator = ImageDataGenerator(rescale=1. / 255)
        self.validation_image_generator = ImageDataGenerator(rescale=1. / 255)
        self.batch_size = 32
        self.img_height = 224
        self.img_width = 224
        self.channel = 3
        self.train_data_gen = self.train_image_generator.flow_from_directory(
            batch_size=self.batch_size,
            directory=self.train_dir,
            shuffle=True,
            target_size=(self.img_height, self.img_width),
            class_mode='binary')
        self.val_data_gen = self.validation_image_generator.flow_from_directory(
            batch_size=self.batch_size,
            directory=self.validation_dir,
            target_size=(self.img_height, self.img_width),
            class_mode='binary')

        np.random.seed(8)
        tf.random.set_seed(8)

        self.base_model = NASNetMobile(include_top=False,
                                       input_shape=(self.img_height,
                                                    self.img_width,
                                                    self.channel),
                                       weights='imagenet')
        self.base_model.trainable = False

        self.model = tf.keras.Sequential([
            self.base_model,
            layers.Flatten(),
            layers.Dense(500, activation='relu'),
            layers.Dense(1, activation='sigmoid')
        ])
        self.model.compile(loss='binary_crossentropy',
                           optimizer=tf.keras.optimizers.Adam(0.001),
                           metrics=['accuracy'])

    def test_file_url(self):
        self.assertEqual(self.exercises.file_url, self.file_url)

    def test_total_train(self):
        self.assertEqual(self.exercises.total_train, self.total_train)

    def test_total_val(self):
        self.assertEqual(self.exercises.total_val, self.total_val)

    def test_base_model_summary(self):
        self.assertEqual(self.exercises.base_model.summary(),
                         self.base_model.summary())

    def test_model_summary(self):
        self.assertEqual(self.exercises.model.summary(), self.model.summary())
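
Both test classes above follow the standard unittest pattern; a minimal runner (an assumption, since the original test files may rely on pytest or unittest discovery instead) would be:

if __name__ == '__main__':
    unittest.main()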