Example 1
import os

import tensorflow as tf
from tensorflow.keras.callbacks import LearningRateScheduler, ReduceLROnPlateau
from tensorflow.keras.optimizers import SGD


def train_model():
    # Create an instance of the model
    model = MyModel()
    # the 0.01 here is immediately overridden by the LearningRateScheduler below
    sgd_optimizer = SGD(learning_rate=0.01, momentum=0.001, nesterov=False)
    model.compile(loss='categorical_crossentropy',
                  optimizer=sgd_optimizer,
                  metrics=["accuracy"])

    # Note: both callbacks adjust the learning rate; the scheduler runs at the
    # start of every epoch, so it overwrites whatever ReduceLROnPlateau sets
    schedule_lr = LearningRateScheduler(lambda epoch: 1e-3 * 0.9**epoch)
    reduce_lr = ReduceLROnPlateau(monitor='val_loss',
                                  factor=0.2,
                                  patience=5,
                                  min_lr=0.001)

    (x_train, x_test, y_train, y_test) = load_dataset()

    # Build the augmentation generator and train with it
    datagen = data_generator()
    # fit_generator is deprecated in TF 2.x; model.fit accepts generators directly
    history = model.fit(datagen.flow(x_train, y_train, batch_size=60),
                        epochs=10,
                        verbose=2,
                        steps_per_epoch=500,
                        validation_data=(x_test, y_test),
                        callbacks=[schedule_lr, reduce_lr])

    # save the trained model (makedirs is a no-op if the directory exists)
    os.makedirs("fashionClassifier", exist_ok=True)
    tf.saved_model.save(model, "fashionClassifier")
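load_dataset() and data_generator() are project helpers that the excerpt does not show. A minimal sketch of what they might look like, assuming Fashion-MNIST (suggested by the "fashionClassifier" save path); the real preprocessing may differ:

import numpy as np
from tensorflow.keras.datasets import fashion_mnist
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.utils import to_categorical


def load_dataset():
    # hypothetical loader: scale pixels to [0, 1] and one-hot encode the labels
    (x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()
    x_train = x_train[..., np.newaxis].astype("float32") / 255.0
    x_test = x_test[..., np.newaxis].astype("float32") / 255.0
    return x_train, x_test, to_categorical(y_train, 10), to_categorical(y_test, 10)


def data_generator():
    # hypothetical augmentation pipeline, consumed above via datagen.flow(...)
    return ImageDataGenerator(rotation_range=10, horizontal_flip=True)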
Example 2
    trainset, valset, testset = data(name=config['dataset'])

    # model
    model = MyModel(blocks=config['blocks'],
                    channel=config['channel'],
                    scale=scale)

    # exponential learning-rate decay schedule
    lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
        config['learning_rate'],
        decay_steps=config['decay_steps'],
        decay_rate=config['decay_rate'])

    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=lr_schedule),
        loss=Loss(),
        metrics=[PSNR(), SSIM()],
    )

    # optionally resume from a checkpoint (build first so the weight shapes exist)
    if config['resume']:
        model.build((None, None, None, 3))
        model.load_weights(checkpoint_path)

    # save the best model checkpoint
    model_checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
        filepath=checkpoint_path,
        save_weights_only=True,
        monitor='val_loss',
        mode='min',
        save_best_only=True)
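The excerpt stops before the callback is passed to training. A plausible continuation, assuming trainset/valset are ready-to-use datasets and that the epoch count lives in config (the key name 'epochs' is a guess, not from the original):

    model.fit(trainset,
              validation_data=valset,
              epochs=config['epochs'],
              callbacks=[model_checkpoint_callback])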
Example 3
File: agent.py Project: kyri33/mlai
}, {
    'pair': 'gbpusd',
    'spread': 0.0001
}]

environments = []
agents = []

#test_env = FXEnv('eurusd')
state_size = (60, 16)
action_size = 7

model = MyModel(action_size)
sdae = MyAutoencoder()

# `lr` is deprecated in tf.keras optimizers; use `learning_rate`
model.compile(optimizer=ko.Adam(learning_rate=0.0001), loss=[logits_loss, value_loss])
sdae.compile(optimizer=ko.Adam(learning_rate=0.001), loss='mse')

for i, p in enumerate(pairs):
    environments.append(FXEnv(p['pair'], spread=p['spread']))
    agents.append(
        MyAgent(model,
                sdae,
                state_size,
                action_size,
                environments[i],
                nm=str(i)))

training = True

if training:
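The excerpt is cut off inside the `if training:` block. A hypothetical continuation for illustration only (MyAgent's training API is not shown; the run() method here is an assumption):

    for agent in agents:
        agent.run()  # assumed entry point that steps the agent through its FXEnv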
Example 4
    }

    train_generator = DataGenerator(train_set, dataset_size, params)
    valid_generator = DataGenerator(valid_set, dataset_size, params)
    test_generator = DataGenerator(test_set, dataset_size, params)

    # with metrics=['accuracy'] below, Keras logs the value as 'val_accuracy',
    # so monitoring 'val_acc' would never match
    checkpoint = ModelCheckpoint(Config['checkpoint_path'] + '/' + name,
                                 monitor='val_accuracy',
                                 verbose=1,
                                 save_best_only=False,
                                 mode='max')

    model = MyModel(n_classes).model
    # compile with optimizer, loss, and metrics
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    model.summary()

    print(Config)

    # training
    hist = model.fit(train_generator,
                     validation_data=valid_generator,
                     epochs=Config['num_epochs'],
                     callbacks=[checkpoint])

    # evaluate returns [loss, accuracy] because an accuracy metric was compiled in
    test_loss, test_acc = model.evaluate(test_generator)
    plt.plot(hist.history['accuracy'])
    plt.plot(hist.history['val_accuracy'])
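The plotting is cut off mid-excerpt. The customary finishing lines, assuming an interactive display is wanted (titles and legend are illustrative additions, not from the original):

    plt.title('model accuracy')
    plt.ylabel('accuracy')
    plt.xlabel('epoch')
    plt.legend(['train', 'validation'])
    plt.show()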
Example 5
#sys.exit()
#model = DeepLabV3Plus(image_shape[0], image_shape[1], nclasses=4)
model = MyModel(4)
model.load_weights(weight_path + 'fcn_20191021.ckpt')

#optimizer = tf.keras.optimizers.SGD(learning_rate=learning_rate, decay=0.0001)
optimizer = tf.keras.optimizers.Adam(
    learning_rate=learning_rate, beta_1=0.9, beta_2=0.999, epsilon=1e-07, amsgrad=False,
    name='Adam'
)

#mean_IOU = tf.keras.metrics.MeanIoU(num_classes=4)
model.compile(
    optimizer=optimizer,
    # the model outputs raw logits, so use the logits-aware loss;
    # plain tf.nn is the TF2 path (tf.compat.v2 is redundant under TF 2.x)
    loss=tf.nn.softmax_cross_entropy_with_logits,
    metrics=['accuracy']
)


# legacy TF1-style GPU memory growth; a tf.compat.v1.Session is not used by
# Keras model.fit under TF 2.x, so this only takes effect with v1 behavior
config = tf.compat.v1.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.compat.v1.Session(config=config)

model.fit(train_dataset, epochs=num_epochs, callbacks=[tensorboard, checkpoint])
model.summary()
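The ConfigProto/Session lines above use the TF1 API. The TF 2.x equivalent, which must run before the GPUs are first initialized, would be along these lines:

import tensorflow as tf

# enable per-GPU memory growth instead of grabbing all GPU memory up front
for gpu in tf.config.list_physical_devices('GPU'):
    tf.config.experimental.set_memory_growth(gpu, True)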