Ejemplo n.º 1
0
    # NOTE(review): scraped fragment — the enclosing function's signature is not
    # visible; `n_top_tags_te`, `data_dir`, `build_func`, `utils`, and `T`
    # (presumably theano.tensor) come from the surrounding context. Verify
    # against the original file before reuse.

    # Tag-list files: training tags are the fixed top-188 Magnatagatune tags;
    # the test tag list is selected by `n_top_tags_te`.
    tag_tr_fp = '../data/data.magnatagatune/tag_list.top188.txt'
    tag_te_fp = '../data/data.magnatagatune/tag_list.top{}.txt'.format(
        n_top_tags_te)

    # Model
    # Multiscale log-mel spectrogram feature IDs. The varying field is the
    # window size (512 .. 16384); hop 512, 128 mel bins, standardized variant.
    scale_list = [
        "logmelspec10000.16000_512_512_128.0.standard",
        "logmelspec10000.16000_1024_512_128.0.standard",
        "logmelspec10000.16000_2048_512_128.0.standard",
        "logmelspec10000.16000_4096_512_128.0.standard",
        "logmelspec10000.16000_8192_512_128.0.standard",
        "logmelspec10000.16000_16384_512_128.0.standard",
    ]

    # Load tag list
    tag_tr_list = utils.read_lines(tag_tr_fp)
    tag_te_list = utils.read_lines(tag_te_fp)

    # Map each test tag to its row/column index in the training tag vocabulary.
    # NOTE(review): raises ValueError if a test tag is absent from the training
    # list — presumably the test tags are a subset; confirm.
    label_idx_list = [tag_tr_list.index(tag) for tag in tag_te_list]

    # Load data
    print("Loading data...")
    X_te_list, y_te = utils.load_data_multiscale_te(data_dir, scale_list)

    # Building Network
    print("Building network...")
    num_scales = len(scale_list)
    # `build_func` appears to return (network, input_var_list, ...); the two
    # discarded values are not used in this fragment.
    network, input_var_list, _, _ = build_func(num_scales)

    # Computing loss
    # Symbolic target matrix for the loss computation (Theano-style API).
    target_var = T.matrix('targets')
Ejemplo n.º 2
0
    # NOTE(review): scraped fragment — the enclosing function's signature is not
    # visible; `base_feat_dir`, `use_real_data`, `anno_dir`, `tag_tr_fp`,
    # `tag_te_fp`, `tag_conv_fp`, `standardizer_dir`, `utils`, `os`, and
    # `StandardScaler` come from the surrounding context.
    upscale_factor = 16  # The total pooling size from all convolution layers
    # Multiscale log-mel feature IDs (raw, i.e. not pre-standardized); the
    # varying field is the window size (512 .. 16384).
    scale_list = [
        "logmelspec10000.16000_512_512_128.0.raw",
        "logmelspec10000.16000_1024_512_128.0.raw",
        "logmelspec10000.16000_2048_512_128.0.raw",
        "logmelspec10000.16000_4096_512_128.0.raw",
        "logmelspec10000.16000_8192_512_128.0.raw",
        "logmelspec10000.16000_16384_512_128.0.raw",
    ]
    # One feature directory per scale.
    feat_dir_list = [
        os.path.join(base_feat_dir, scale) for scale in scale_list
    ]
    num_scales = len(scale_list)
    if use_real_data:
        # Evaluate on the fixed MedleyDB test-split file list (.npy features).
        fn_list_fp = '../data/data.medleydb/fn.te.txt'
        fn_list = ['{}.npy'.format(fn) for fn in utils.read_lines(fn_list_fp)]
    else:
        # Otherwise use whatever annotation files exist in `anno_dir`.
        fn_list = os.listdir(anno_dir)

    # Load tag list
    tag_idx_list = utils.get_test_tag_indices(tag_tr_fp, tag_te_fp,
                                              tag_conv_fp)

    # Standardizer dir
    # Scaler pickles are stored under the scale name *without* the '.raw'
    # suffix (scalers were fitted on the standardized naming scheme).
    std_fp_list = [
        os.path.join(standardizer_dir, scale.replace('.raw', ''), 'scaler.pkl')
        for scale in scale_list
    ]
    std_list = [utils.unpickle(std_fp) for std_fp in std_fp_list]
    # NOTE(review): the line below throws away the fitted scalers loaded just
    # above and substitutes fresh, UNFITTED StandardScaler() instances (the
    # "bz" comment suggests a debugging override). The parallel fragment in
    # Ejemplo 3 keeps the unpickled scalers instead — confirm which behavior
    # is intended before relying on this code.
    std_list = [StandardScaler()
                for std_fp in std_list]  # initial StandardScaler() bz
Ejemplo n.º 3
0
    # Default setting
    # NOTE(review): scraped fragment, near-identical to Ejemplo 2 except that
    # it keeps the unpickled (fitted) scalers. Free names (`base_feat_dir`,
    # `use_real_data`, `anno_dir`, `tag_*_fp`, `standardizer_dir`, `utils`)
    # come from the unseen enclosing function.
    upscale_factor = 16  # The total pooling size from all convolution layers
    # Multiscale log-mel feature IDs (raw variant); window size varies
    # 512 .. 16384 with hop 512 and 128 mel bins.
    scale_list = [
        "logmelspec10000.16000_512_512_128.0.raw",
        "logmelspec10000.16000_1024_512_128.0.raw",
        "logmelspec10000.16000_2048_512_128.0.raw",
        "logmelspec10000.16000_4096_512_128.0.raw",
        "logmelspec10000.16000_8192_512_128.0.raw",
        "logmelspec10000.16000_16384_512_128.0.raw",
    ]
    # One feature directory per scale.
    feat_dir_list = [os.path.join(base_feat_dir, scale) for scale in scale_list]
    num_scales = len(scale_list)
    if use_real_data:
        # Evaluate on the fixed MedleyDB test-split file list (.npy features).
        fn_list_fp = '../data/data.medleydb/fn.te.txt'
        fn_list = ['{}.npy'.format(fn)
                   for fn in utils.read_lines(fn_list_fp)]
    else:
        # Otherwise use whatever annotation files exist in `anno_dir`.
        fn_list = os.listdir(anno_dir)

    # Load tag list
    tag_idx_list = utils.get_test_tag_indices(tag_tr_fp, tag_te_fp, tag_conv_fp)

    # Standardizer dir
    # Scaler pickles live under the scale name with the '.raw' suffix removed.
    std_fp_list = [os.path.join(standardizer_dir,
                                scale.replace('.raw', ''),
                                'scaler.pkl')
                   for scale in scale_list]
    # Fitted per-scale scalers, loaded from disk (kept as-is here, unlike the
    # override in Ejemplo 2).
    std_list = [utils.unpickle(std_fp) for std_fp in std_fp_list]

    # Building Network
    print("Building network...")