def test_or_val_generator(self, path):
    # Pair a sequence generator with a label generator so each yielded
    # batch contains matching inputs and targets.
    X = iu.train_generator(path + ".seq", self.batchsize, self.seq_len,
                           'seq', 'non-repeating')
    y = iu.train_generator(path + ".labels", self.batchsize, self.seq_len,
                           'labels', 'non-repeating')
    while True:
        yield next(X), next(y)  # next() works on Python 3, unlike X.next()
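A brief usage sketch. The owning class is not shown in the snippet, so `SequenceLoader` and its constructor arguments below are hypothetical; only `test_or_val_generator` comes from the code above.

# Hypothetical usage: stream paired (sequence, label) batches.
loader = SequenceLoader(batchsize=32, seq_len=100)  # hypothetical owning class
gen = loader.test_or_val_generator("./data/val")
X_batch, y_batch = next(gen)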
Example #2
from keras.callbacks import ModelCheckpoint
from keras.optimizers import Adam


def train_model(model, image_paths_training, steering_angles_training,
                image_paths_val, steering_angles_val):
    checkpoint = ModelCheckpoint('model-{epoch:03d}.h5',
                                 monitor='val_loss',
                                 verbose=0,
                                 save_best_only=True,
                                 mode='auto')

    model.compile(loss="mse", optimizer=Adam(lr=1.0e-3))

    history_object = model.fit_generator(
        train_generator("./data/",
                        image_paths_training,
                        steering_angles_training,
                        batch_size=128),
        samples_per_epoch=20000,
        nb_epoch=5,
        max_q_size=1,
        validation_data=val_generator("./data/",
                                      image_paths_val,
                                      steering_angles_val,
                                      batch_size=128),
        nb_val_samples=len(image_paths_val),
        callbacks=[checkpoint],
        verbose=1)
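`train_generator` and `val_generator` are not defined in this snippet. A minimal sketch of the kind of endless (images, angles) generator `fit_generator` expects here; `load_image`, which would read and preprocess one frame from the data directory, is a hypothetical helper:

import numpy as np

def train_generator(data_dir, image_paths, steering_angles, batch_size=128):
    # Yield (images, angles) batches forever, as fit_generator requires.
    n = len(image_paths)
    while True:
        idx = np.random.permutation(n)[:batch_size]
        images = np.array([load_image(data_dir, image_paths[i]) for i in idx])
        angles = np.array([steering_angles[i] for i in idx])
        yield images, angles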
Example #4
    def fit(self, train_data, valid_data, epochs=10, batchsize=128, **kwargs):
        """Training function

        Evaluate at each epoch against validation data
        Save the best model according to the validation loss

        Parameters
        ----------
        train_data : tuple, (X_train, y_train)
            X_train.shape == (N, H, W, C)
            y_train.shape == (N, N_classes)

        valid_data : tuple
            (X_val, y_val)

        epochs : int
            Number of epochs to train

        batchsize : int
            Minibatch size

        **kwargs
            Keyword arguments for `fit_generator`
        """
        callback_best_only = ModelCheckpoint(self.path, save_best_only=True)
        train_gen, val_gen = train_generator()

        X_train, y_train = train_data
        X_val, y_val = valid_data

        N = X_train.shape[0]
        print("[DEBUG] N -> {}", X_train.shape)
        N_val = X_val.shape[0]

        self.model.fit_generator(train_gen.flow(X_train, y_train, batchsize),
                                 steps_per_epoch=N // batchsize,
                                 validation_data=val_gen.flow(
                                     X_val, y_val, batchsize),
                                 validation_steps=N_val // batchsize,
                                 epochs=epochs,
                                 callbacks=[callback_best_only],
                                 **kwargs)
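A hypothetical call matching the shapes documented in the docstring. The random data, all sizes, and `clf` (an instance of the owning class) are invented for illustration:

import numpy as np

# Dummy data: X of shape (N, H, W, C), one-hot y of shape (N, N_classes).
X_train = np.random.rand(512, 32, 32, 3)
y_train = np.eye(10)[np.random.randint(0, 10, 512)]
X_val = np.random.rand(128, 32, 32, 3)
y_val = np.eye(10)[np.random.randint(0, 10, 128)]

clf.fit((X_train, y_train), (X_val, y_val), epochs=5, batchsize=64)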
Example #5
def main():
    batches_per_epoch = 250
    generate_size = 200
    nb_epoch = 20
    print('1. Loading data.............')
    te_con_feature, te_emb_feature, te_seq_feature, vocabs_size = load_test_dataset()

    n_con = te_con_feature.shape[1]
    n_emb = te_emb_feature.shape[1]
    print('1.1 merge con_feature,emb_feature,seq_feature.....')
    test_feature = prepare_inputX(te_con_feature, te_emb_feature,
                                  te_seq_feature)

    print('2. cluster.........')
    cluster_centers = h5py.File('cluster.h5', 'r')['cluster'][:]

    print('3. Building model..........')
    model = build_lstm(n_con, n_emb, vocabs_size, dis_size, emb_size,
                       cluster_centers.shape[0])
    checkPoint = ModelCheckpoint('weights/' + model_name + '.h5',
                                 save_best_only=True)
    earlystopping = EarlyStopping(patience=500)
    model.compile(loss=hdist, optimizer='rmsprop')  # alternatives: loss='mse', optimizer=Adagrad
    tr_generator = train_generator(generate_size)
    model.fit_generator(tr_generator,
                        samples_per_epoch=batches_per_epoch * generate_size,
                        nb_epoch=nb_epoch,
                        validation_data=getValData(),
                        verbose=1,
                        callbacks=[checkPoint, earlystopping])

    print('4. Predicting result .............')
    te_predict = model.predict(test_feature)
    save_results(te_predict, result_csv_path)
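`fit_generator` consumes a generator that loops forever; with the Keras 1 API used here, `samples_per_epoch` decides when an epoch ends. A minimal sketch of the `train_generator(generate_size)` pattern this snippet assumes (`load_train_batch` is a hypothetical loader, since the real feature-building code is not shown):

def train_generator(generate_size):
    # Loop indefinitely; Keras cuts an epoch after samples_per_epoch samples.
    while True:
        X_batch, y_batch = load_train_batch(generate_size)  # hypothetical loader
        yield X_batch, y_batch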
Example #6
            plt.subplot(1, 3, 3)
            plt.imshow(im_prediction[:, :, 0])
            plt.show()


if __name__ == "__main__":

    # model = enet.build(len(utils.labels), configs.img_height, configs.img_width)
    # print(model.summary())

    model = ICNet(width=512, height=512, n_classes=len(utils.labels))
    print(model.model.summary())
    exit(0)
    df = load_data()

    train_generator = utils.train_generator(df, 1)

    # Plotting generator output
    # images, targets = next(train_generator)
    #
    # for i in range(len(images)):
    #     im_gt = np.array(targets[i])
    #     im_prediction = model.predict(np.array([images[i]]))[0]
    #     print im_prediction.shape
    #     print im_prediction[:, :, 0]
    #     plt.subplot(1, 3, 1)
    #     plt.imshow(np.array(images[i]))
    #     plt.subplot(1, 3, 2)
    #     plt.imshow(utils.convert_class_to_rgb(im_gt))
    #     plt.subplot(1, 3, 3)
    #     plt.imshow(utils.convert_class_to_rgb(im_prediction))
Example #7
test_results = False
visualize_gen = False

if __name__ == "__main__":

    epochs = 2

    m = enet.build(len(utils.labels), configs.img_height, configs.img_width)
    m.load_weights("./enet-c-v1-2.h5")
    print(m.summary())

    label_path = configs.data_path + "extra_labels.csv"
    labels = pandas.read_csv(label_path).values

    train_generator = utils.train_generator(labels, 2)

    images, targets = next(train_generator)

    if visualize_gen:

        for i in range(2):
            im = np.array(images[i], dtype=np.uint8)
            im_mask = np.array(targets[i], dtype=np.uint8)
            plt.subplot(1, 3, 1)
            plt.imshow(im)
            plt.axis('off')
            plt.subplot(1, 3, 2)
            plt.imshow(utils.convert_class_to_rgb(im_mask))
            print(im_mask.shape)
            plt.axis('off')
Example #8
    # Setting Callbacks
    callbacks = [
        utils.checkpoint_callback(selected_model.name),
        utils.tensorboard_callback(selected_model.name),
        utils.reducelr_callback()
    ]

    if args.early:
        callbacks.append(utils.earlystopping_callback())

    if TRAIN:
        # Train model
        history = selected_model.model.fit_generator(
            generator=utils.train_generator(DATA_TRAIN_DIR,
                                            selected_model.target_size,
                                            BATCH_SIZE,
                                            selected_model.input_type),
            steps_per_epoch=STEPS_PER_EPOCHS,
            callbacks=callbacks,
            validation_data=utils.valid_generator(DATA_VALID_DIR,
                                                  selected_model.target_size,
                                                  BATCH_SIZE,
                                                  selected_model.input_type),
            validation_steps=STEPS_PER_EPOCHS_VALID,
            epochs=EPOCHS)

    # Printing generalisation loss
    # print("====> Generalisation loss: ", selected_model.model.evaluate(Xtest, Ytest, batch_size=BATCH_SIZE))

    # Let's color some images
    TO_COLOR = args.to_color
Example #9
val_best = 1000
iterations = 30

model = get_model()

# using the Adam optimizer
adam = Adam(lr=1e-4, beta_1=0.9, beta_2=0.999, epsilon=1e-08)

model.compile(optimizer=adam, loss='mse')

# validation data generator
valid_gen = utils.validation_generator(df)

for iteration in range(iterations):

    train_gen = utils.train_generator(df, batch_size)
    history = model.fit_generator(train_gen,
                                  samples_per_epoch=256 * 79,
                                  nb_epoch=1,
                                  validation_data=valid_gen,
                                  nb_val_samples=len(df))

    utils.save_model('model_' + str(iteration) + '.json',
                     'model_' + str(iteration) + '.h5', model)

    val_loss = history.history['val_loss'][0]
    if val_loss < val_best:
        best_model = iteration
        val_best = val_loss
        utils.save_model('model.json', 'model.h5', model)
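The loop relies on a `utils.save_model` helper. A plausible sketch built on the standard Keras serialization calls (`to_json` plus `save_weights`); the actual helper in the source repository may differ:

def save_model(json_path, weights_path, model):
    # Persist the architecture as JSON and the weights as HDF5.
    with open(json_path, 'w') as f:
        f.write(model.to_json())
    model.save_weights(weights_path)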
Example #10
                  min_delta=0.00001,
                  mode='min'),
    ReduceLROnPlateau(monitor='val_loss',
                      factor=0.1,
                      patience=3,
                      epsilon=0.00001,
                      mode='min'),
    ModelCheckpoint(monitor='val_loss',
                    filepath="./wavenet_weight.hdf5",
                    save_best_only=True,
                    save_weights_only=True,
                    mode='min')
]

history = model.fit_generator(
    generator=train_generator(batch_size, input_dim, data_dir, sample_len,
                              default_offset),
    steps_per_epoch=train_step,
    epochs=epoch,
    callbacks=callbacks,
    validation_data=valid_generator(batch_size, input_dim, valid_data_dir,
                                    sample_len, default_offset),
    validation_steps=valid_step,
)

# if __name__ == '__main__':
#     onehot = load_data(file_name, data_path, input_dim)
#     sample_len = onehot.shape[0]
#     onehot = np.reshape(onehot, (1, -1, input_dim))
#     in_w = onehot[:, :-1, :]
#     out_w = onehot[:, 1:, :]
#
Example #11
model = create_model((300,300,3))
print(model.summary()) #see the model
df_train = pd.read_csv('df_train_0.csv')
df_validation = pd.read_csv('df_validation_0.csv')

#make generator
train_gen = keras.preprocessing.image.ImageDataGenerator(rescale=1/255)
validation_gen = keras.preprocessing.image.ImageDataGenerator(rescale=1/255)
df_train['y1'] = df_train['y1'].astype('str')
df_validation['y1'] = df_validation['y1'].astype('str')
df_train['y2'] = df_train['y2'].astype('str')
df_validation['y2'] = df_validation['y2'].astype('str')
df_train['y3'] = df_train['y3'].astype('str')
df_validation['y3'] = df_validation['y3'].astype('str')
train_Gen = train_gen.flow_from_dataframe(df_train, directory=PATH,
                                          x_col='image_id', y_col=['y1', 'y2', 'y3'],
                                          target_size=(300, 300), validate_filenames=False,
                                          batch_size=8, class_mode="multi_output")
training_Gen = train_generator(train_Gen, add_mask, 8)
val_Gen = validation_gen.flow_from_dataframe(df_validation, directory=PATH,
                                             x_col='image_id', y_col=['y1', 'y2', 'y3'],
                                             target_size=(300, 300), validate_filenames=False,
                                             class_mode="multi_output", batch_size=4)
validation_Gen = val_generator(val_Gen)

model.compile(keras.optimizers.Adam(lr=4e-4),
              loss={'root': 'categorical_crossentropy',
                    'vowel': 'categorical_crossentropy',
                    'consonant': 'categorical_crossentropy'},
              loss_weights={'root': 0.60, 'vowel': 0.20, 'consonant': 0.20},
              metrics={'root': ['accuracy'], 'vowel': ['accuracy'], 'consonant': ['accuracy']})

class myCallback(keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        '''Manually adjust the learning rate at selected epochs.'''
        if epoch in (0, 1, 3, 5, 7, 8, 9):
            lr = K.get_value(self.model.optimizer.lr)
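The snippet is cut off here, so how the retrieved rate is applied is not shown. A self-contained sketch of the same manual-schedule pattern using `K.set_value`; the halving factor is an assumption, not taken from the source:

import keras
import keras.backend as K

class StepDecayCallback(keras.callbacks.Callback):
    '''Hypothetical stand-in: halve the learning rate at chosen epochs.'''
    def on_epoch_end(self, epoch, logs=None):
        if epoch in (0, 1, 3, 5, 7, 8, 9):
            lr = K.get_value(self.model.optimizer.lr)
            K.set_value(self.model.optimizer.lr, lr * 0.5)  # assumed decay factor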