Code example #1
File: experiment.py Project: skyoung/MemDTC
def experiment():

    # Build the training and evaluation input pipelines over the TFRecords.
    train_input_fn = generate_input_fn(is_train=True,
                                       tfrecords_path=config.tfrecords_path,
                                       batch_size=config.batch_size,
                                       time_step=config.time_step)

    eval_input_fn = generate_input_fn(is_train=False,
                                      tfrecords_path=config.tfrecords_path,
                                      batch_size=config.batch_size_eval,
                                      time_step=config.time_step_eval)

    # Wire both pipelines and the model function into the Estimator, then train.
    estimator = Estimator(train_input_fn=train_input_fn,
                          eval_input_fn=eval_input_fn,
                          model_fn=model_fn)

    estimator.train()
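
generate_input_fn, config, model_fn and Estimator are defined elsewhere in the MemDTC project and are not shown on this page. A minimal sketch of what a generate_input_fn with this signature could look like, assuming TensorFlow's tf.data API; the feature name 'sequence' and the flat one-float-per-time-step schema are placeholders, not the project's real record layout:

import tensorflow as tf

def generate_input_fn(is_train, tfrecords_path, batch_size, time_step):
    # Returns a zero-argument callable, matching how the example hands
    # train_input_fn/eval_input_fn to the Estimator.
    def input_fn():
        dataset = tf.data.TFRecordDataset(tfrecords_path)
        if is_train:
            # Shuffle and repeat only for the training pipeline.
            dataset = dataset.shuffle(buffer_size=10000).repeat()

        def parse(serialized):
            # Hypothetical record schema: one float feature per time step.
            features = tf.io.parse_single_example(
                serialized,
                {'sequence': tf.io.FixedLenFeature([time_step], tf.float32)})
            return features['sequence']

        return dataset.map(parse).batch(batch_size)

    return input_fn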
Code example #2
import numpy

# numpy is only needed for the commented-out save at the bottom.
# DatasetLoader, DataScheme and Estimator are project-local classes whose
# import lines are not part of this excerpt.

# import the specific model definition here
import models_source.model03 as source

create_model = source.create_model
epochs = source.epochs
batch_size = source.batch_size

# Prepare the DATA
dataset = DatasetLoader("data/demography.csv", "data/Results_snemovna2017.csv")
inputset = dataset.get_input_acc_to_scheme(DataScheme.ALL)
outputset = dataset.get_outputset()
metadata = dataset.metadata

# Train the MODEL
estimator = Estimator(create_model)

estimator.train(inputset, outputset, epochs=epochs, batch_size=batch_size)

# the Python file name as a string (here 'model03')
modelname = (create_model.__module__).split('.')[1]

estimator.save_prediction_to_file(inputset, metadata,
                                  'data/predictions/' + modelname + '.csv')
# save the model to an .h5 file
estimator.save_model_to_file()

#numpy.save('trained/' + modelname + '-' + str(epochs) + 'epoch' +
#               str(batch_size) + 'batch' + '.npy', result)
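
The .h5 extension suggests the Estimator wraps a Keras model. A minimal sketch of what models_source/model03.py could export, assuming Keras; the architecture, the create_model signature, and the hyperparameter values below are placeholders, not the project's actual ones:

from tensorflow import keras

epochs = 100       # placeholder values, not the project's real settings
batch_size = 32

def create_model(input_dim, output_dim):
    # Placeholder architecture: a small fully connected classifier.
    model = keras.Sequential([
        keras.layers.Dense(64, activation='relu', input_shape=(input_dim,)),
        keras.layers.Dense(output_dim, activation='softmax'),
    ])
    model.compile(optimizer='adam', loss='categorical_crossentropy')
    return model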
Code example #3
    layers = [
        # ... earlier layer definitions elided in this excerpt ...
        {
            'type': Layers.FC,
            'input_len': 30,
            'num_neurons': 10,
            'activation': Activations.SIGMOID
        },
    ]

    # layers = [
    #     {'type': Layers.INPUT, 'dimensions': (1, 28, 28)},
    #     {'type': Layers.CONV, 'stride': 1, 'fshape': (6, 1, 3, 3), 'padding': 1, 'activation': Activations.RELU},
    #     {'type': Layers.MAXPOOL, 'stride': 3, 'length': 3},
    #     {'type': Layers.CONV, 'stride': 1, 'fshape': (12, 6, 3, 3), 'padding': 1, 'activation': Activations.RELU},
    #     {'type': Layers.MAXPOOL, 'stride': 3, 'length': 3},
    #     {'type': Layers.CONV, 'stride': 1, 'fshape': (24, 12, 3, 3), 'padding': 1, 'activation': Activations.RELU},
    #     {'type': Layers.MAXPOOL, 'stride': 3, 'length': 3},
    #     {'type': Layers.CONV, 'stride': 1, 'fshape': (24, 24, 1, 1), 'padding': 0, 'activation': Activations.RELU},
    #     {'type': Layers.CONV, 'stride': 1, 'fshape': (10, 24, 1, 1), 'padding': 0, 'activation': Activations.RELU},
    #     {'type': Layers.FLATTEN, 'input_len': 24, 'num_neurons': 10, 'activation': Activations.SIGMOID},
    # ]

    network = Network(layers)
    # Plain mini-batch gradient descent over the network's parameters.
    optimizer = MiniBatchGradientDescent(network,
                                         learning_rate=0.01,
                                         batch_size=30)
    estimator = Estimator(network=network,
                          optimizer=optimizer,
                          dataset=mnist,
                          transformer=transform_data)
    estimator.train(epochs=300)
    estimator.test()
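
The commented-out configuration above appears to be an alternative, mostly convolutional setup for the same (1, 28, 28) MNIST input: three conv/max-pool stages grow the channel count to 24, and two 1x1 convolutions then funnel it down to the ten class scores, so the final FLATTEN layer stands in for a large dense classifier.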