# Data generator for the training set.
train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=(image_size, image_size),
    batch_size=train_batchsize,
    class_mode='categorical')

# Data generator for the validation set. shuffle=False keeps the sample
# order fixed so per-epoch predictions stay aligned with the true labels.
validation_generator = validation_datagen.flow_from_directory(
    validation_dir,
    target_size=(image_size, image_size),
    batch_size=val_batchsize,
    class_mode='categorical',
    shuffle=False)

model = Sequential()

# Callback that computes custom metrics against the validation generator.
# NOTE(review): semantics of MetricsWithGenerator are defined elsewhere in
# the project — presumably a per-epoch metrics callback; confirm there.
metrics_epoch = MetricsWithGenerator(validation_generator)

# Input layer. input_shape is the shape of ONE sample:
# (height, width, channels) — RGB images resized by the generators above.
model.add(Conv2D(32,
                 kernel_size=(3, 3),
                 activation='relu',
                 input_shape=[image_size, image_size, 3]))

# A second, wider convolutional layer.
model.add(Conv2D(64, (3, 3), activation='relu'))

# MaxPooling2D shrinks the feature maps (and parameter count downstream) by
# sliding a 2x2 window over the previous layer and keeping the max of each
# window's 4 values.
model.add(MaxPooling2D(pool_size=(2, 2)))

# Dropout regularization to reduce overfitting is added next (the Dropout
# layer itself appears after this segment — TODO confirm).
# Data generator for the training set.
train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=(image_size, image_size),
    batch_size=train_batchsize,
    class_mode='binary')

# Data generator for the validation set. shuffle=False keeps the sample
# order fixed so per-epoch predictions stay aligned with the true labels.
validation_generator = validation_datagen.flow_from_directory(
    validation_dir,
    target_size=(image_size, image_size),
    batch_size=val_batchsize,
    class_mode='binary',
    shuffle=False)

# Callback that computes custom metrics against the validation generator.
# NOTE(review): semantics of MetricsWithGenerator are defined elsewhere in
# the project — presumably a per-epoch metrics callback; confirm there.
customMetrics = MetricsWithGenerator(validation_generator)

model = Sequential()

# Input layer. input_shape is the shape of ONE sample:
# (height, width, channels).
# FIX: was hard-coded to [200, 200, 3], which breaks whenever
# image_size != 200 because the generators above resize every image to
# (image_size, image_size). Tie the input shape to image_size instead.
model.add(Conv2D(32,
                 kernel_size=(3, 3),
                 activation='relu',
                 input_shape=(image_size, image_size, 3)))

# A second, wider convolutional layer.
model.add(Conv2D(64, (3, 3), activation='relu'))

# MaxPooling2D shrinks the feature maps (and parameter count downstream) by
# sliding a 2x2 window over the previous layer and keeping the max of each
# window's 4 values.