Example #1
0
def train_and_evaluate(args):
    """Train SPDNet on the AFEW data and print the final validation accuracy.

    Downloads/unpacks the dataset, builds train/val ``tf.data`` pipelines,
    trains with checkpoint + TensorBoard callbacks, then evaluates.

    Args:
        args: Parsed CLI namespace providing ``data_dir``, ``job_dir``,
            ``num_epochs``, ``shuffle_buffer``, ``batch_size`` and
            ``learning_rate``.
    """
    utils.download_data(args.data_dir, DATA_URL, unpack=True)
    train = utils.load_matlab_data("Y1", args.data_dir, DATA_FOLDER, "train")
    val = utils.load_matlab_data("Y1", args.data_dir, DATA_FOLDER, "val")
    # NOTE: no .repeat() here. model.fit(epochs=N) already iterates a finite
    # dataset once per epoch; combining .repeat(num_epochs) with
    # epochs=num_epochs would traverse the data num_epochs**2 times.
    # shuffle() reshuffles every epoch by default
    # (reshuffle_each_iteration=True).
    train_dataset = (tf.data.Dataset.from_tensor_slices(train).shuffle(
        args.shuffle_buffer).batch(args.batch_size, drop_remainder=True))
    val_dataset = tf.data.Dataset.from_tensor_slices(val).batch(
        args.batch_size, drop_remainder=True)

    spdnet = model.create_model(args.learning_rate, num_classes=AFEW_CLASSES)

    # Persist checkpoints and TensorBoard logs under the job directory.
    os.makedirs(args.job_dir, exist_ok=True)
    checkpoint_path = os.path.join(args.job_dir, "afew-spdnet.ckpt")
    cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_path,
                                                     save_weights_only=True,
                                                     verbose=1)
    log_dir = os.path.join(args.job_dir, "logs")
    tb_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir)

    spdnet.fit(
        train_dataset,
        epochs=args.num_epochs,
        validation_data=val_dataset,
        callbacks=[cp_callback, tb_callback],
    )
    # evaluate() returns [loss, accuracy] per the model's compiled metrics.
    _, acc = spdnet.evaluate(val_dataset, verbose=2)
    print("Final accuracy: {}%".format(acc * 100))
Example #2
0
def prepare_data():
    """Load, shuffle and split the MATLAB features into train/val sets.

    Returns:
        ((X_train, y_train), (X_val, y_val)) where the validation set holds
        the last ``VAL_SPLIT`` fraction of the shuffled examples.
    """
    # NOTE(review): relies on a module-global `args`, unlike
    # train_and_evaluate which receives it as a parameter — consider
    # threading `args` through here as well.
    features, labels = utils.load_matlab_data("fea", args.data_dir, DATA_FOLDER)
    features = np.array([np.stack(example) for example in features.squeeze()])
    # reshape to [batch_size, spatial_dim, temp_dim, num_rows, num_cols]
    features = np.transpose(features, axes=[0, 1, 4, 2, 3])
    # Shuffle features and labels with the same permutation.
    indices = np.random.permutation(len(features))
    features, labels = features[indices], labels[indices]
    # BUG FIX: the original assigned the small val-sized tail to X_train and
    # the large remainder to X_val (split inverted). An explicit split index
    # also avoids the `[:-0]` pitfall (empty slice) when val_len == 0.
    val_len = int(len(features) * VAL_SPLIT)
    split = len(features) - val_len
    X_train, X_val = features[:split, ...], features[split:, ...]
    y_train, y_val = labels[:split, ...], labels[split:, ...]
    return (X_train, y_train), (X_val, y_val)