def read(self, img_rows=IMAGE_SIZE1, img_cols=IMAGE_SIZE2):
        """Build train/validation/test directory iterators and attach them to self.

        All three splits share the same pipeline: pixel values rescaled by
        1/255, images resized to (img_rows, img_cols), binary class labels.
        Results are stored on self.train, self.valid and self.test.
        """
        # One fresh ImageDataGenerator per split, exactly as three separate
        # rescale-only generators would be configured by hand.
        def _make_flow(source_dir):
            return ImageDataGenerator(rescale=1. / 255).flow_from_directory(
                source_dir,
                target_size=(img_rows, img_cols),
                batch_size=batch_size,
                class_mode='binary')

        self.train = _make_flow(train_data_dir)
        self.valid = _make_flow(validation_data_dir)
        self.test = _make_flow(test_data_dir)
# --- Esempio n. 2 (snippet separator left over from the original scrape) ---
    def __init__(self,
                 train_dir,
                 validate_dir,
                 batch_size=100,
                 data_augmentation=True):
        """Set up Keras directory iterators for training and validation.

        Images are resized to 299x299 and mapped to [-0.5, 0.5]. With
        class_mode="input" each batch yields (image, image) pairs
        (autoencoder-style targets). When data_augmentation is on, the
        training stream additionally applies random shears, zooms, shifts
        and horizontal flips.
        """
        def _center(x):
            # Equivalent to rescale=1/255 followed by a -0.5 shift.
            return x / 255 - 0.5

        if data_augmentation:
            print("Enable data augmentation")
            augment_kwargs = dict(
                shear_range=0.2,
                zoom_range=0.2,
                width_shift_range=0.3,
                height_shift_range=0.3,
                horizontal_flip=True,
                fill_mode='nearest')
        else:
            print("Disable data augmentation")
            augment_kwargs = {}
        train_datagen = ImageDataGenerator(
            preprocessing_function=_center, **augment_kwargs)
        # Validation uses the same normalization but no augmentation.
        validation_datagen = ImageDataGenerator(preprocessing_function=_center)

        self.train_generator_flow = train_datagen.flow_from_directory(
            train_dir,
            target_size=(299, 299),
            batch_size=batch_size,
            class_mode="input")
        self.validation_generator_flow = validation_datagen.flow_from_directory(
            validate_dir,
            target_size=(299, 299),
            batch_size=batch_size,
            class_mode="input")
# --- Esempio n. 3 (snippet separator left over from the original scrape) ---
def generator_for_input(data_dir, batch_size, img_rows, img_cols, seed=0):
    """Yield (images, one-hot labels) batches forever from a class-per-folder dir.

    Pixels are rescaled by 1/255 and randomly flipped horizontally. A fixed
    seed keeps the shuffling order reproducible, so generators built with the
    same seed stay aligned with each other.
    """
    print("generator_for_mulinputs params :", data_dir, batch_size, img_rows,
          img_cols)
    datagen = ImageDataGenerator(rescale=1. / 255, horizontal_flip=True)
    print("all propertys of datagen", datagen)
    flow = datagen.flow_from_directory(
        data_dir,
        target_size=(img_rows, img_cols),
        batch_size=batch_size,
        class_mode='categorical',  # multi-class one-hot labels ('binary' for 2-class)
        seed=seed)  # the seed must match across generators for identical ordering
    print("gen.class_indices", flow.class_indices)  # class folder -> label index
    while True:
        yield next(flow)
# Configuration for the mini-ImageNet run.
# NOTE(review): train_data_dir is referenced below but defined elsewhere in
# the file — verify it is set before this point.
test_data_dir = 'data/miniimagenet/val'  #
img_rows = 224  # 227
img_cols = 224  # 227
epochs = 3
# batch size
batch_size = 4
# total number of training samples
nb_train_samples = 3120  #3*1040
#all num of val samples
nb_validation_samples = 780  #3*260
##################data import and preprocessing###########################
train_datagen = ImageDataGenerator(rescale=1. / 255, horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1. / 255)
train_generator = train_datagen.flow_from_directory(
    train_data_dir,
    target_size=(img_rows, img_cols),
    batch_size=batch_size,
    class_mode='categorical')  # multi-class; 'binary')
print("train_generator.filenames", train_generator.filenames)  # filenames in iteration order
print("train_generator.class_indices",
      train_generator.class_indices)  # class folder -> label index mapping
validation_generator = test_datagen.flow_from_directory(
    test_data_dir,
    target_size=(img_rows, img_cols),
    batch_size=batch_size,
    class_mode='categorical')  # multi-class; 'binary')
print("validation_generator.filenames",
      validation_generator.filenames)  # filenames in iteration order
print("validation_generator.class_indices",
      validation_generator.class_indices)  # class folder -> label index mapping
##################building the network###########################
# --- Esempio n. 5 (snippet separator left over from the original scrape) ---
# Compile the binary classifier (assumes a single sigmoid output upstream).
classifier.compile(optimizer='adam',
                   loss='binary_crossentropy',
                   metrics=['accuracy'])

# Part 2 - Fitting the CNN to the images

batch_size = 32
# Training stream: rescaling plus light augmentation (shear/zoom/flip).
train_datagen = ImageDataGenerator(rescale=1. / 255,
                                   shear_range=0.2,
                                   zoom_range=0.2,
                                   horizontal_flip=True)

# Test stream: rescaling only — evaluation data is never augmented.
test_datagen = ImageDataGenerator(rescale=1. / 255)

training_set = train_datagen.flow_from_directory(train_set_path,
                                                 target_size=input_size,
                                                 batch_size=batch_size,
                                                 class_mode='binary')

test_set = test_datagen.flow_from_directory(test_set_path,
                                            target_size=input_size,
                                            batch_size=batch_size,
                                            class_mode='binary')

# Create a loss history
history = LossHistory()

# train model
# NOTE(review): this fit_generator call is truncated by the snippet
# boundary — the closing parenthesis and the remaining arguments
# (e.g. validation_steps, callbacks) are missing.
classifier.fit_generator(training_set,
                         steps_per_epoch=8000 / batch_size,
                         epochs=90,
                         validation_data=test_set,
# Compile the multi-class classifier with the externally configured optimizer.
classifier.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])

# Number of images drawn from the dataset per step while training the network
batch_size = 10
# Training stream: rescaling plus light augmentation.
# FIX: validation_split is required here because flow_from_directory below
# requests subset="training" — Keras raises ValueError when a subset is
# requested from a generator that has no validation_split configured.
# 0.10 mirrors the split used by validation_datagen.
train_datagen = ImageDataGenerator(rescale = 1./255,
                                   shear_range = 0.2,
                                   zoom_range = 0.2,
                                   horizontal_flip = True,
                                   validation_split=0.10)

validation_datagen = ImageDataGenerator(rescale=1./255,validation_split=0.10)
test_datagen = ImageDataGenerator(rescale=1./ 255)


training_set = train_datagen.flow_from_directory(training_set_path,
                                                 target_size=input_size,
                                                 batch_size=batch_size,
                                                 subset="training",
                                                 class_mode='categorical')
validation_set = validation_datagen.flow_from_directory(validation_set_path,
                                            target_size=input_size,
                                            batch_size=batch_size,
                                            subset="validation",
                                            class_mode='categorical')

# Test stream: no augmentation, fixed order (shuffle=False) and batch_size=1
# so predictions can be matched back to filenames one-to-one.
test_set = test_datagen.flow_from_directory(test_set_path,
                                            target_size=input_size,
                                            color_mode="rgb",
                                            shuffle = False,
                                            batch_size=1,
                                            class_mode='categorical')
# Compiling the CNN
classifier.compile(optimizer='adam',
                   loss='binary_crossentropy',
                   metrics=['accuracy'])

# Part 2 - Fitting the CNN to the images
batch_size = 32
# Training stream: rescaling plus light augmentation (shear/zoom/flip).
train_datagen = ImageDataGenerator(rescale=1. / 255,
                                   shear_range=0.2,
                                   zoom_range=0.2,
                                   horizontal_flip=True)

# Test stream: rescaling only — evaluation data is never augmented.
test_datagen = ImageDataGenerator(rescale=1. / 255)

# Directory iterators over class-per-folder datasets with binary labels.
training_set = train_datagen.flow_from_directory('dataset/training_set',
                                                 target_size=input_size,
                                                 batch_size=batch_size,
                                                 class_mode='binary')

test_set = test_datagen.flow_from_directory('dataset/test_set',
                                            target_size=input_size,
                                            batch_size=batch_size,
                                            class_mode='binary')


# Create a loss history
# Keras callback that accumulates loss information across epochs.
# NOTE(review): this class appears truncated by the snippet boundary — the
# callback hooks (e.g. on_epoch_end) that would update these fields are not
# visible here.
class LossHistory(Callback):
    def __init__(self):
        super().__init__()
        # index of the current epoch
        self.epoch_id = 0
        # accumulated textual loss log
        self.losses = ''
# --- Esempio n. 8 (snippet separator left over from the original scrape) ---
def add_data(model, train_images, train_labels, test_images, test_labels, train_ratio):
    """Load grayscale images from utils.CATEGORIES_LOCATION and append them,
    flattened and normalized per *model*, to train_images/train_labels.

    Labels come back one-hot encoded. The test_* arguments and train_ratio
    are currently unused (a train/test split variant was removed): every
    image goes into the training split. Returns
    (train_images, train_labels, None, None), or None when the directory
    holds no images.
    """
    from tensorflow.contrib.keras.api.keras.preprocessing.image import ImageDataGenerator
    image_size = int(config['DEFAULT']['IMAGE_SIZE'])

    # batch_size=1 so each next() yields exactly one (image, label) pair.
    dir_iter = ImageDataGenerator().flow_from_directory(
        utils.CATEGORIES_LOCATION,
        color_mode='grayscale',
        target_size=(image_size, image_size),
        batch_size=1,
        class_mode='binary')

    total_images = dir_iter.samples
    category_count = dir_iter.num_classes

    # Nothing to load — signal "no data" to the caller.
    if total_images == 0:
        return None

    # Per-category tallies (kept for parity with a future train/test split).
    per_category = {c: 0.0 for c in range(category_count)}

    flat_images = []
    flat_labels = []
    for _ in range(total_images):
        item = next(dir_iter)
        pixels = numpy.array(item[0], dtype=numpy.uint8).reshape(1, image_size, image_size, 1)
        # Invert and normalize; the CNN variant is additionally centred on 0.
        if model == "regression":
            pixels = ((255 - pixels) / 255.0)
        elif model == "CNN":
            pixels = (((255 - pixels) / 255.0) - 0.5)
        pixels = numpy.reshape(pixels, [1, -1])
        category = int(item[1][0])
        per_category[category] += 1.0
        flat_labels.append(category)
        flat_images.append(numpy.reshape(pixels, image_size * image_size))

    # Everything goes into the training split.
    for image_vec, label in zip(flat_images, flat_labels):
        train_images.append(image_vec)
        train_labels.append(label)

    train_images = numpy.array(train_images)

    # Transform labels into one-hot vectors.
    one_hot = numpy.zeros((len(train_images), category_count))
    one_hot[numpy.arange(len(train_images)), train_labels] = 1
    train_labels = numpy.reshape(one_hot, [-1, category_count])

    return train_images, train_labels, None, None
# --- Esempio n. 9 (snippet separator left over from the original scrape) ---
    # Output layer: a single sigmoid unit for binary classification.
    classifier.add(Dense(units=1, activation='sigmoid'))

    # Compiling the CNN
    classifier.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

    # Part 2 - Fitting the CNN to the images
    batch_size = 32
    # Training stream: rescaling plus light augmentation (shear/zoom/flip).
    train_datagen = ImageDataGenerator(rescale=1. / 255,
                                       shear_range=0.2,
                                       zoom_range=0.2,
                                       horizontal_flip=True)

    # Test stream: rescaling only — evaluation data is never augmented.
    test_datagen = ImageDataGenerator(rescale=1. / 255)

    # NOTE(review): hard-coded absolute Windows paths; consider making these
    # parameters of the enclosing function (its header is outside this view).
    training_set = train_datagen.flow_from_directory('G:\\ANALYTICS_WORLD_R_SAS\\python_world\\deep learning\\Convolutional_Neural_Networks\\dataset\\training_set',
                                                     target_size=input_size,
                                                     batch_size=batch_size,
                                                     class_mode='binary')

    test_set = test_datagen.flow_from_directory('G:\\ANALYTICS_WORLD_R_SAS\\python_world\\deep learning\\Convolutional_Neural_Networks\\dataset\\test_set',
                                                target_size=input_size,
                                                batch_size=batch_size,
                                                class_mode='binary')

    # Create a loss history
    history = LossHistory()

    # NOTE(review): this fit_generator call is truncated by the snippet
    # boundary — the closing parenthesis and remaining arguments are missing.
    classifier.fit_generator(training_set,
                             steps_per_epoch=8000/batch_size,
                             epochs=90,
                             validation_data=test_set,
                             validation_steps=2000/batch_size,
# --- Esempio n. 10 (snippet separator left over from the original scrape) ---
script_dir = os.path.dirname(".")
# Both paths point at the same directory; the validation_split below carves
# it into disjoint training (67%) and validation (33%) subsets.
training_set_path = os.path.join(script_dir, '/content/drive/My Drive/train/ProjTrain/')
test_set_path = os.path.join(script_dir, '/content/drive/My Drive/train/ProjTrain/')

batch_size = 32
input_size = (512,512)
# Training stream: rescaling plus light augmentation.
# FIX: validation_split is required here because flow_from_directory below
# requests subset="training" — Keras raises ValueError when a subset is
# requested from a generator that has no validation_split configured.
# 0.33 matches test_datagen so the two subsets partition the directory.
train_datagen = ImageDataGenerator(rescale=1. / 255,
                                   shear_range=0.2,
                                   zoom_range=0.2,
                                   horizontal_flip=True,
                                   validation_split=0.33)

test_datagen = ImageDataGenerator(rescale=1. / 255, validation_split=0.33)

training_set = train_datagen.flow_from_directory(training_set_path,
                                                 target_size=input_size,
                                                 batch_size=batch_size,
                                                 subset="training",
                                                 class_mode='categorical')



test_set = test_datagen.flow_from_directory(test_set_path,
                                            target_size=input_size,
                                            batch_size=batch_size,
                                            subset="validation",
                                            class_mode='categorical')



#########################
### Build the model