Example #1
        entry = pairs.get(dataset.LABELS[i])
        if entry is None:
            continue
        if len(entry['x']) != 2:
            continue

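        # Draw the paired coordinates as a connected segment, colored by label index.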
        color = COLOR_MAP(to_color(i))
        plt.plot(entry['x'], entry['y'], color=color)

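    # Show the figure interactively unless an output file name was given.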
    if fname is None:
        plt.show()
    else:
        plt.savefig(fname=fname)
        print("Saved image to " + fname)


if __name__ == '__main__':
    import sys

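    # argv[1]: path to saved siamese weights; argv[2] (optional): output image file name.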
    datasets = dataset.parse()
    siamese, _, _ = gradtype_model.create()
    siamese.load_weights(sys.argv[1])

    train_datasets, validate_datasets = dataset.split(datasets)
    train_datasets = dataset.trim_dataset(train_datasets)
    validate_datasets = dataset.trim_dataset(validate_datasets)
    train_coords = dataset.evaluate_model(siamese, train_datasets)
    validate_coords = dataset.evaluate_model(siamese, validate_datasets)
    fname = sys.argv[2] if len(sys.argv) >= 3 else None
    pca(train_coords, validate_coords, fname)
Example #2
    saver = tf.train.Saver(max_to_keep=0, name='hist-dist')
    restore = sys.argv[1]
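    # A TensorFlow checkpoint may be passed as "<prefix>.index"; Saver.restore() expects the bare prefix.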
    if restore.endswith('.index'):
        restore = restore[:-len('.index')]
    saver.restore(sess, restore)

    logging.debug('Loading dataset...')

    loaded = dataset.load(overlap=8)
    train_dataset = loaded['train']
    validate_dataset = loaded['validate']

    logging.debug('Trimming dataset...')

    train_dataset, _ = dataset.trim_dataset(train_dataset, random_state=SEED)
    validate_dataset, _ = dataset.trim_dataset(validate_dataset,
                                               random_state=SEED)

    train_dataset, _ = dataset.flatten_dataset(train_dataset,
                                               random_state=SEED)
    validate_dataset, _ = dataset.flatten_dataset(validate_dataset,
                                                  random_state=SEED)

    holds = []
    codes = []
    deltas = []
    sequence_lens = []

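    # Collect per-sequence features from the flattened training set.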
    for seq in train_dataset:
        holds.append(seq['holds'])
Example #3
model.compile(adam,
              loss='categorical_crossentropy',
              metrics=['accuracy', top_5])

#
# Train
#

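# Log weight/activation histograms to TensorBoard every 50 epochs.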
tb = TensorBoard(histogram_freq=50,
                 log_dir=gradtype_utils.get_tensorboard_logdir())

callbacks = [tb]

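# Train in chunks of SAVE_EPOCHS epochs, regenerating the regression data and saving weights after each chunk.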
for i in range(start_epoch, TOTAL_EPOCHS, SAVE_EPOCHS):
    end_epoch = i + SAVE_EPOCHS

    train_x = dataset.gen_regression(dataset.trim_dataset(train_datasets))
    train_y = gradtype_model.generate_one_hot_regression(train_x['labels'])

    model.fit(x=train_x,
              y=train_y,
              batch_size=4096,
              initial_epoch=i,
              epochs=end_epoch,
              callbacks=callbacks,
              validation_data=(validate_x, validate_y))

    print("Saving...")
    fname = './out/gradtype-regr-{:08d}.h5'.format(end_epoch)
    siamese.save_weights(fname)