# --- Imports -----------------------------------------------------------------
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Flatten, Dense, Dropout
from keras.models import Sequential  # FIX: Sequential is instantiated below but was never imported
from keras import backend as K
import os
import pandas as pd

os.chdir('/content')  # Google Colab notation
import utils  # project-local helper; must be importable from the cwd set above
from keras.callbacks import ModelCheckpoint, EarlyStopping
#import PIL.Image
os.getcwd()

# Build the train/validation/test directory layout and per-split sample counts.
# NOTE(review): 'preapare' is a typo in the helper's own name — kept as-is to
# match the utils module's API.
train_dir, validation_dir, test_dir, nb_train_samples, nb_validation_samples, nb_test_samples = \
    utils.preapare_full_dataset_for_flow(
        train_dir_original='train',          # Google Colab notation
        test_dir_original='test',            # Google Colab notation
        target_base_dir='target base dir')   # Google Colab notation

# Resize every image to a fixed width/height before feeding the network.
img_width, img_height = 150, 150
epochs = 2
batch_size = 20

# The active backend decides whether the channel axis comes first or last.
if K.image_data_format() == 'channels_first':
    input_shape = (3, img_width, img_height)
else:
    input_shape = (img_width, img_height, 3)

# Small CNN: conv/pool stages (the model continues past this chunk).
model = Sequential()
model.add(Conv2D(32, (3, 3), activation='relu', input_shape=input_shape))
model.add(MaxPooling2D((2, 2)))
model.add(Conv2D(64, (3, 3), activation='relu'))
# NOTE(review): this chunk starts mid-definition — the three statements below,
# up to the dangling `else:`, are the tail of a bottleneck-feature-extraction
# helper (`save_bottlebeck_features`, called further down) whose `def`/`if`
# header lies outside this chunk. Indentation reconstructed — confirm against
# the full file.

    # Test images: no labels (class_mode=None) and no shuffling, so cached
    # predictions stay aligned with the generator's file order.
    test_generator = datagen.flow_from_directory(test_dir,
            target_size=(img_width, img_height),
            batch_size=batch_size,
            class_mode=None,
            shuffle=False)
    # Run every test batch through the convolutional base and cache the
    # resulting bottleneck features to disk.
    bottleneck_features_test = model.predict_generator(
            test_generator, len(test_generator))
    np.save(open('bottleneck_features_test.npy', 'wb'),
            bottleneck_features_test)
else:
    # Features were extracted on a previous run — skip the forward pass.
    print('bottleneck directory already exists')

# --- Driver section ----------------------------------------------------------
# Build the train/validation/test directory layout and per-split sample counts.
# NOTE(review): 'preapare' / 'bottlebeck' are typos in the helpers' own names —
# kept as-is to match their definitions elsewhere in the project.
train_dir, validation_dir, test_dir, nb_train_samples, nb_validation_samples, nb_test_samples = \
    utils.preapare_full_dataset_for_flow(
        train_dir_original='D:\\Data-Science\\Distracted Driver\\imgs\\train',
        test_dir_original='D:\\Data-Science\\Distracted Driver\\imgs\\test',
        target_base_dir='D:\\Data-Science\\Distracted Driver\\bottle_neck_target')

# Headless VGG16 (no classifier top), ImageNet weights, used as a fixed
# feature extractor with VGG16's own input preprocessing.
model = applications.VGG16(include_top=False, weights='imagenet',
                           input_shape=(img_width, img_height, 3))
bottleneck_dir = 'D:\\Data-Science\\Distracted Driver\\bottleneck_features'
preprocess = applications.vgg16.preprocess_input
save_bottlebeck_features(model, preprocess, train_dir, validation_dir,
                         test_dir, bottleneck_dir)

if (os.getcwd() == "D:\\Data-Science\\Distracted Driver"):
    os.chdir("D:/Data-Science/Distracted Driver/bottleneck_features")
# NOTE(review): whether this load belongs inside the `if` above is ambiguous
# in the flattened source; placed at top level — confirm.
X_train = np.load(open('bottleneck_features_train.npy', 'rb'))
# --- Imports -----------------------------------------------------------------
from keras.layers import Flatten, Dense
from keras.models import Sequential  # FIX: Sequential is instantiated below but was never imported
from keras import backend as K
#import collections
import os
import pandas as pd

# NOTE(review): the working dir uses "Data Science" (space) while the dataset
# paths below use "Data-Science" (hyphen) — confirm which layout is correct.
os.chdir("D:/Data Science/Distracted Driver")
import utils

# Early stopping halts training once the monitored metric stops improving.
from keras.callbacks import ModelCheckpoint, EarlyStopping
#import PIL.Image
os.getcwd()

# Build the train/validation/test directory layout and per-split sample counts.
# NOTE(review): 'preapare' is a typo in the helper's own name — kept as-is to
# match the utils module's API.
train_dir, validation_dir, test_dir, nb_train_samples, nb_validation_samples, nb_test_samples = \
    utils.preapare_full_dataset_for_flow(
        train_dir_original='D:\\Data-Science\\Distracted Driver\\train',
        test_dir_original='D:\\Data-Science\\Distracted Driver\\test',
        target_base_dir='D:\\Data-Science\\Distracted Driver\\target base dir')

# Resize every image to a fixed width/height.
img_width, img_height = 150, 150
epochs = 30
batch_size = 20

# The backend decides whether the channel axis comes first (e.g. Theano) or
# last (TensorFlow's default) — the original comment had this backwards.
if K.image_data_format() == 'channels_first':
    input_shape = (3, img_width, img_height)
else:
    input_shape = (img_width, img_height, 3)

model = Sequential()
# CNN model: first layer will be 32 filters of size 3x3, stride 1, no padding
# --- Imports -----------------------------------------------------------------
# FFNN head requires Flatten and Dense layers.
from keras.layers import Dense, Flatten
#from keras import backend as K
import os  # FIX: os is used below (chdir/getcwd) but had no visible import in this section

os.chdir('D:\\Data Science\\deeplearning\\Python scripts\\kaggle-cats vs dogs')
import utils

# Early stopping halts training once the monitored metric stops improving.
from keras.callbacks import ModelCheckpoint, EarlyStopping
# Pillow (pip install pillow) is required by Keras image loading.
#import PIL.Image
os.getcwd()

# Prepare the small/full dataset layout by calling the utils helper.
# NOTE(review): 'preapare' is a typo in the helper's own name — kept as-is to
# match the utils module's API.
train_dir, validation_dir, test_dir, nb_train_samples, nb_validation_samples, nb_test_samples = \
    utils.preapare_full_dataset_for_flow(
        train_dir_original='D:\\Data Science\\Data\\CatsVsDogs\\train',
        test_dir_original='D:\\Data Science\\Data\\CatsVsDogs\\test',
        target_base_dir='D:\\Data Science\\Data\\CatsVsDogs\\target base dir')

# Resize every image to a fixed width/height.
img_width, img_height = 150, 150
epochs = 2  # 30 for a full training run
batch_size = 20

# =============================================================================
# # Channels-first layout is used by some non-TensorFlow backends:
# if K.image_data_format() == 'channels_first':
#     input_shape = (3, img_width, img_height)
# else:  # TensorFlow default: channels last
#     input_shape = (img_width, img_height, 3)
# =============================================================================
# --- Imports -----------------------------------------------------------------
from keras.layers import Dense, Flatten
#from keras import backend as K
import os  # FIX: os is used below (chdir/getcwd) but had no visible import in this section

os.chdir(
    '/Users/bharath/Desktop/Folder/Data Science/Projects/dogs-vs-cats-redux-kernels-edition'
)
import utils

# Early stopping halts training once the monitored metric stops improving.
from keras.callbacks import ModelCheckpoint, EarlyStopping
# Pillow (pip install pillow) supplies PIL; Keras needs it for image loading.
import PIL.Image
os.getcwd()

# Prepare the small/full dataset layout by calling the utils helper.
# NOTE(review): 'preapare' is a typo in the helper's own name — kept as-is to
# match the utils module's API.
train_dir, validation_dir, test_dir, nb_train_samples, nb_validation_samples, nb_test_samples = \
    utils.preapare_full_dataset_for_flow(
        train_dir_original='/Users/bharath/Desktop/Folder/Data Science/Projects/dogs-vs-cats-redux-kernels-edition/train',
        test_dir_original='/Users/bharath/Desktop/Folder/Data Science/Projects/dogs-vs-cats-redux-kernels-edition/test',
        target_base_dir='/Users/bharath/Desktop/Folder/Data Science/Projects/dogs-vs-cats-redux-kernels-edition/target base dir')

# Resize every image to a fixed width/height.
img_width, img_height = 150, 150
epochs = 2  # 30 for a full training run
batch_size = 20

# =============================================================================
# # Channels-first layout is used by some non-TensorFlow backends:
# if K.image_data_format() == 'channels_first':
#     input_shape = (3, img_width, img_height)
# else:  # TensorFlow default: channels last
#     input_shape = (img_width, img_height, 3)
# =============================================================================
# --- Imports -----------------------------------------------------------------
# FFNN head requires Flatten and Dense layers.
from keras.layers import Dense, Flatten
from keras.models import Sequential  # FIX: Sequential is instantiated below but was never imported
from keras import backend as K
import os  # FIX: os is used below (chdir/getcwd) but had no visible import in this section

# FIX: raw strings — the originals used single backslashes, so '\train',
# '\test' and '\target' embedded a literal TAB character (the '\t' escape)
# and pointed at non-existent paths.
os.chdir(r'D:\Data\Deep Learning\dogs-vs-cats')
import utils

# Early stopping halts training once the monitored metric stops improving.
from keras.callbacks import ModelCheckpoint, EarlyStopping
# Pillow (pip install pillow) is required by Keras image loading.
#import PIL.Image
os.getcwd()

# Prepare the small/full dataset layout by calling the utils helper.
# NOTE(review): 'preapare' is a typo in the helper's own name — kept as-is to
# match the utils module's API.
train_dir, validation_dir, test_dir, nb_train_samples, nb_validation_samples, nb_test_samples = \
    utils.preapare_full_dataset_for_flow(
        train_dir_original=r'D:\Data\Deep Learning\dogs-vs-cats\train',
        test_dir_original=r'D:\Data\Deep Learning\dogs-vs-cats\test',
        target_base_dir=r'D:\Data\Deep Learning\dogs-vs-cats\target base dir')

# Resize every image to a fixed width/height.
img_width, img_height = 150, 150
epochs = 2  # 30 for a full training run
batch_size = 20

# The backend decides whether the channel axis comes first (e.g. Theano) or
# last (TensorFlow's default).
if K.image_data_format() == 'channels_first':
    input_shape = (3, img_width, img_height)
else:
    input_shape = (img_width, img_height, 3)

# Model begins here
model = Sequential()