Example #1
import numpy as np

import hat.backend as K
from hat.preprocessing import sparse_to_categorical  # sparse-to-one-hot helper (assumed to live in hat.preprocessing)
import config as cfg
import prepare_data as pp_data


# reshape images from (N, 3072) to (N, 3, 32, 32)
def reshape_img_for_cnn(x):
    N = x.shape[0]
    return np.reshape(x, (N, 3, 32, 32))


# load data
tr_X, tr_y, te_X, te_y = pp_data.load_data()

# normalize data
scaler = pp_data.get_scaler(tr_X)
tr_X = pp_data.transform(tr_X, scaler)
te_X = pp_data.transform(te_X, scaler)

# reshape X to shape: (n_pictures, n_fmaps=3, n_row=32, n_col=32)
tr_X = reshape_img_for_cnn(tr_X)
te_X = reshape_img_for_cnn(te_X)

# init params
n_out = 10

# sparse label to 1-of-K categorical label
tr_y = sparse_to_categorical(tr_y, n_out)
te_y = sparse_to_categorical(te_y, n_out)
print(tr_X.shape)
print(tr_y.shape)
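
For reference, sparse_to_categorical turns integer class labels into 1-of-K (one-hot) vectors. A minimal NumPy sketch of that conversion (an illustration only, not the hat library's implementation):

import numpy as np

def to_one_hot(y, n_out):
    # Convert integer labels of shape (N,) into an (N, n_out) one-hot matrix.
    y = np.asarray(y, dtype=int)
    out = np.zeros((len(y), n_out), dtype='float32')
    out[np.arange(len(y)), y] = 1.0
    return out

# e.g. to_one_hot([3, 0], 10) -> two rows with a single 1 at columns 3 and 0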
Example #2

           # ... (start of this excerpt is truncated; the tail of a training call follows)
           optimizer=optimizer,
           callbacks=callbacks)


if __name__ == '__main__':
    # hyper-params
    n_concat = 11  # concatenate frames
    hop = 5  # step_len
    fold = 0  # can be 0, 1, 2, 3

    dev_fe_fd = cfg.dev_fe_logmel_fd
    eva_fd_fd = cfg.eva_fe_logmel_fd

    if sys.argv[1] == "--dev_train":
        scaler = pp_data.get_scaler(fe_fd=dev_fe_fd,
                                    csv_file=cfg.dev_tr_csv[fold],
                                    with_mean=True,
                                    with_std=True)
        train(tr_fe_fd=dev_fe_fd,
              tr_csv_file=cfg.dev_tr_csv[fold],
              te_fe_fd=dev_fe_fd,
              te_csv_file=cfg.dev_te_csv[fold],
              n_concat=n_concat,
              hop=hop,
              scaler=scaler,
              out_md_fd=cfg.dev_md_fd)

    elif sys.argv[1] == "--dev_recognize":
        scaler = pp_data.get_scaler(fe_fd=dev_fe_fd,
                                    csv_file=cfg.dev_tr_csv[fold],
                                    with_mean=True,
                                    with_std=True)
        # ... (rest of this branch is truncated in the excerpt; its final call ends with)
                      hop=hop)
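
In these scripts, n_concat and hop control how consecutive feature frames are stacked into fixed-length training segments: a block of n_concat frames is taken every hop frames. A minimal NumPy sketch of that idea (illustrative only; pp_data's actual segmentation code may differ):

import numpy as np

def concat_frames(x, n_concat, hop):
    # x: (n_frames, n_freq) feature matrix, e.g. a log-mel spectrogram.
    # Returns (n_blocks, n_concat * n_freq), one flattened block every `hop` frames.
    n_frames, n_freq = x.shape
    blocks = [x[i:i + n_concat].flatten()
              for i in range(0, n_frames - n_concat + 1, hop)]
    return np.array(blocks)

# e.g. concat_frames(np.zeros((100, 64)), n_concat=11, hop=5).shape == (18, 704)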


Example #3

if __name__ == '__main__':
    # hyper-params
    n_concat = cfg.n_concat  # concatenate frames
    hop = cfg.hop  # step_len
    fold = 0  # can be 0, 1, 2, 3

    # your workspace
    dev_feature = cfg.dev_mel
    eva_feature = cfg.eva_mel

    if sys.argv[1] == "--all":
        scaler = pp_data.get_scaler(fe_fd=dev_feature,
                                    csv_file=cfg.dev_tr[fold],
                                    with_mean=True,
                                    with_std=True)

        dev_train()
        scaler = pp_data.get_scaler(fe_fd=dev_feature,
                                    csv_file=cfg.dev_meta,
                                    with_mean=True,
                                    with_std=True)
        eva_train()

    elif sys.argv[1] == "--dev_train":
        scaler = pp_data.get_scaler(fe_fd=dev_feature,
                                    csv_file=cfg.dev_tr[fold],
                                    with_mean=True,
                                    with_std=True)
        #scaler = joblib.load(cfg.ld_sc)
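
The commented-out joblib.load(cfg.ld_sc) line hints that the fitted scaler can be cached on disk instead of being re-estimated on every run. A small sketch of that pattern (assuming the scaler is a picklable, sklearn-style object; the cache path below is made up for illustration):

import os
import joblib

SCALER_CACHE = 'scaler.p'  # hypothetical path, stands in for cfg.ld_sc

def get_or_fit_scaler(fit_fn, path=SCALER_CACHE):
    # Load a previously fitted scaler if the cache exists, otherwise fit and cache it.
    if os.path.exists(path):
        return joblib.load(path)
    scaler = fit_fn()  # e.g. lambda: pp_data.get_scaler(...)
    joblib.dump(scaler, path)
    return scaler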
Example #4

           # ... (start of this excerpt is truncated; the tail of a training call follows)
           callbacks=callbacks)


if __name__ == '__main__':
    # hyper-params
    n_concat = 11        # concatenate frames
    hop = 5            # step_len
    fold = 0            # can be 0, 1, 2, 3
    
    dev_fe_fd = cfg.dev_fe_logmel_fd
    eva_fd_fd = cfg.eva_fe_logmel_fd
    
    if sys.argv[1] == "--dev_train": 
        scaler = pp_data.get_scaler(fe_fd=dev_fe_fd, 
                                    csv_file=cfg.dev_tr_csv[fold], 
                                    with_mean=True, 
                                    with_std=True)
        train(tr_fe_fd=dev_fe_fd, 
              tr_csv_file=cfg.dev_tr_csv[fold], 
              te_fe_fd=dev_fe_fd, 
              te_csv_file=cfg.dev_te_csv[fold], 
              n_concat=n_concat, 
              hop=hop, 
              scaler=scaler, 
              out_md_fd=cfg.dev_md_fd)
              
    elif sys.argv[1] == "--dev_recognize":
        scaler = pp_data.get_scaler(fe_fd=dev_fe_fd, 
                                    csv_file=cfg.dev_tr_csv[fold], 
                                    with_mean=True, 
                                    with_std=True)
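
Throughout these examples, pp_data.get_scaler(..., with_mean=True, with_std=True) and pp_data.transform(...) perform feature normalization; the argument names mirror scikit-learn's StandardScaler. Assuming that is what they wrap (an assumption, not confirmed by the excerpts), the step reduces to:

import numpy as np
from sklearn.preprocessing import StandardScaler

# Hypothetical stand-ins for the training / test feature matrices (n_samples, n_features).
tr_X = np.random.rand(100, 64).astype('float32')
te_X = np.random.rand(20, 64).astype('float32')

# Fit the scaler on the training split only, then apply it to both splits.
scaler = StandardScaler(with_mean=True, with_std=True).fit(tr_X)
tr_X = scaler.transform(tr_X)
te_X = scaler.transform(te_X)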