Ejemplo n.º 1
0
 # NOTE(review): fragment cut from a larger function -- `params`, `batch_size`,
 # `logging_helper`, `create_noise_est_model`, `my_callbacks`, `noise_img_gen`
 # and `model_helper` are defined outside this view.
 # Number of training epochs, read from the 'e' key of params (default 60).
 epochs = int(params.get('e', 60))
 # Bias value read from params; unused below -- presumably consumed elsewhere,
 # it is only logged here.
 bias_e = float(params.get('bias', 1.3))
 logging_helper.debug('epochs={}, batchsize={}, bias_e={}'.format(
     epochs, batch_size, bias_e))
 # Input image dimensions fed to both the model and the evaluation callback.
 img_width, img_height = 512, 512
 # train_data_dir = '../pre_data'
 # validation_data_dir = '../val_data'
 # NOTE(review): the two counts below are unused in this fragment.
 nb_train_samples = 220
 nb_validation_samples = 100
 #
 img_dir = './images/origin-gen'
 test_dir = './images/img_test'
 # Size of the synthetic sample pool; combined with batch_size it fixes
 # steps_per_epoch below.
 samples_num = 5200
 model = create_noise_est_model(img_height=img_height,
                                img_width=img_width,
                                channel_num=3)
 # Per-epoch evaluation on the held-out test images; max_sigma=60 matches the
 # generator's sigma_max_value below.
 callbacks = [
     my_callbacks.NoiseEstCallback(img_dir=test_dir,
                                   height=img_height,
                                   width=img_width,
                                   max_sigma=60)
 ]
 # Train from an infinite noisy-image generator; each epoch covers the whole
 # sample pool once.
 model.fit_generator(generator=noise_img_gen(img_dir=img_dir,
                                             batch_size=batch_size,
                                             sigma_max_value=60),
                     steps_per_epoch=samples_num // batch_size,
                     epochs=epochs,
                     callbacks=callbacks)
 # Persist architecture (JSON) and weights (HDF5) separately.
 model_helper.save_model(model=model,
                         struct_path='est_model.json',
                         weight_path='est_model.h5')
Ejemplo n.º 2
0
    # NOTE(review): fragment from a larger script -- `cleaned`, `model`,
    # `feature_test`, `target_test`, `mape_score`, `save_model`, `MODEL_NAME`,
    # `feature_selection` and `load_and_split_data` are defined outside this
    # view. `mape` is unpacked but unused here; `pred` is overwritten below.
    # Unzip the outlier-cleaned rows back into parallel sequences.
    feature_train, target_train, pred, mape = zip(* cleaned)
    start_time = time()
    model.fit(feature_train, target_train)
    print("re-training time after cleaning outliers", round(time() - start_time, 3), "s")

    # Start prediction using cross validation set
    start_time = time()
    pred = model.predict(feature_test)
    # Fixed typo in the timing message: "predition" -> "prediction".
    print("prediction time", round(time() - start_time, 3), "s")

    # Show MAPE score of trained model with cross validation set
    score = mape_score(pred, target_test)
    print('MAPE score ', score)

    # Save model for later use
    save_model(model, './model', MODEL_NAME)




    # NOTE(review): mid-block import; left in place to preserve this
    # fragment's structure (`save_result` is not used within this view).
    from result_helper import save_result

    # Load the unlabeled prediction set, apply the same feature selection
    # fitted earlier, and run the final predictions.
    data_file = '../../processed_data/prediction_data/to_predict_features'
    test_metadata_list, test_features_list, test_target_list = load_and_split_data(data_file)
    test_features_list = feature_selection.transform(test_features_list)

    test_predictions = model.predict(test_features_list)

    OUTPUT_FOLDER = './output'
Ejemplo n.º 3
0
if __name__ == "__main__":
    # Train, cross-validate, score and persist the regression model.
    # Relies on module-level names defined elsewhere in this file:
    # load_and_split_data, train_test_split, create_model, mape_score,
    # save_model, TEST_SIZE, RANDOM_STATE, MODEL_NAME, time.

    # Load and split training data into metadata, features, target
    data_file = '../../processed_data/features_data/training_data/features_ready'
    metadata_list, features_list, target_list = load_and_split_data(data_file)

    # Split data into training set and cross validation set
    feature_train, feature_test, target_train, target_test = train_test_split(
        features_list,
        target_list,
        test_size=TEST_SIZE,
        random_state=RANDOM_STATE)

    # Train the model and report wall-clock training time.
    start_time = time()
    model = create_model()
    model.fit(feature_train, target_train)
    print("training time", round(time() - start_time, 3), "s")

    # Predict on the held-out cross validation set and report timing.
    start_time = time()
    pred = model.predict(feature_test)
    # Fixed typo in the timing message: "predition" -> "prediction".
    print("prediction time", round(time() - start_time, 3), "s")

    # Show MAPE score of trained model with cross validation set
    score = mape_score(pred, target_test)
    print('MAPE score ', score)

    # Save model for later use
    save_model(model, './model', MODEL_NAME)
import sys

from keras.layers import Convolution2D, MaxPooling2D
from keras.layers.core import Activation, Dense, Flatten
from keras.models import Sequential

from model_helper import save_model

# Build and save an untrained LeNet-style classifier for 1x28x28
# (channels-first, MNIST-sized) input.
# Usage: python <script> <model_file> <weights_file>
model_file, weights_file = sys.argv[1], sys.argv[2]

model = Sequential()

# Conv block 1: 8 feature maps with 5x5 kernels, then 2x2 max pooling.
model.add(Convolution2D(8, 5, 5, input_shape=(1, 28, 28)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))

# Conv block 2: 16 feature maps with 5x5 kernels, then 3x3 max pooling.
model.add(Convolution2D(16, 5, 5))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(3, 3), strides=(3, 3)))

# Classifier head: flatten and project to a 10-way softmax.
model.add(Flatten())
model.add(Dense(10))
model.add(Activation('softmax'))

# Persist architecture + weights via the project's helper.
save_model(model, model_file, weights_file)
Ejemplo n.º 5
0
    # NOTE(review): fragment -- `init`, `config`, `optimizer`, `cost_func`,
    # `x`, `y`, `pred`, `states`, `rewards`, `tf`, `pickle` and `model_helper`
    # are defined outside this view; this looks like the tail of the
    # "learn for X" pass, followed by the start of the "learn for O" pass.
    with tf.Session() as sess:
        sess.run(init)

        print('Learning...')
        # One optimizer step per epoch over the full (states, rewards) batch.
        for epoch in range(config.learning_epochs):
            _, cost = sess.run([optimizer, cost_func],
                               feed_dict={
                                   x: states,
                                   y: rewards
                               })
            print('Learning for X: Epoch ', epoch, ' of ',
                  config.learning_epochs, ' with cost of ', cost)

        print('saving model X...')
        model_helper.save_model(sess, 'tf-model/X', x, pred)

# Learn for O
with open('learn_o.dat', 'rb') as learn_o_file:
    # Rebind states/rewards for the O pass (clobbers the X-pass lists).
    states = []
    rewards = []

    print('Reading games...')
    # Drain the pickle stream: records appear to alternate between a states
    # batch and a rewards batch (.tolist() suggests numpy arrays -- TODO
    # confirm). The loop is presumably terminated by an EOFError handler
    # below this fragment -- verify against the full file.
    # NOTE(review): pickle.load is only safe on trusted local data.
    while True:
        try:
            rows = pickle.load(learn_o_file).tolist()
            for row in rows:
                states.append(row)
            rows = pickle.load(learn_o_file).tolist()
            for row in rows:
                rewards.append(row)
Ejemplo n.º 6
0
# Build and save an untrained CNN classifier for 1x28x28 (channels-first,
# MNIST-sized) input using the old Keras Convolution2D API.
# Usage: python <script> <model_file> <weights_file>
import sys
from model_helper import save_model

from keras.models import Sequential
from keras.layers.core import Dense, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D

# Output paths for the serialized model come from the command line.
model_file = sys.argv[1]
weights_file = sys.argv[2]

model = Sequential()

# Conv block 1: 8 feature maps, 5x5 kernels, then 2x2 max pooling.
model.add(Convolution2D(8, 5, 5, input_shape=(1, 28, 28)))
model.add(Activation("relu"))

model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))

# Conv block 2: 16 feature maps, 5x5 kernels, then 3x3 max pooling.
model.add(Convolution2D(16, 5, 5))
model.add(Activation("relu"))

model.add(MaxPooling2D(pool_size=(3, 3), strides=(3, 3)))

# Classifier head: flatten and project to a 10-way softmax.
model.add(Flatten())
model.add(Dense(10))
model.add(Activation("softmax"))

# Persist architecture + weights via the project's helper.
save_model(model, model_file, weights_file)
Ejemplo n.º 7
0
    # NOTE(review): fragment -- `args`, `mh`, `device`, `model`, `trainloader`,
    # `optimizer`, `criterion`, `validloader`, `testloader`, `arch`,
    # `train_dataset`, `nHiddens`, `nOutputs`, `pDropout` and `lr` are
    # defined outside this view. `args.accurecy` is misspelled at its
    # definition site, so the attribute name must stay as-is here.
    # Fixed typo in the banner: "Trainging" -> "Training".
    print('### Training the model')
    # Convert the CLI percentage into a 0-1 fraction for early stopping.
    stop_accuracy = args.accurecy / 100.0
    epoch = mh.train_model(device,
                           model,
                           trainloader,
                           optimizer,
                           criterion,
                           validloader,
                           verbose=True,
                           stop_accuracy=stop_accuracy)

    # Evaluate the trained network on the held-out test loader.
    print('### Testing the model')
    accuracy = mh.test_model(device, model, criterion, testloader)
    print(f"Accuracy:  {accuracy:.2f}%")

    # Save a checkpoint with everything needed to rebuild the model.
    print('### Saving Model')
    filepath = args.save_dir
    mh.save_model(model,
                  arch,
                  train_dataset,
                  optimizer,
                  epoch,
                  filepath,
                  nHiddens=nHiddens,
                  nOutputs=nOutputs,
                  pDropout=pDropout,
                  lr=lr)

    print('Model Saved to: ', filepath)