Code Example #1
File: transform_stash.py Project: saifrahmed/dl4mir
def main(stash_file, input_key, transform_file,
         param_file, output_file, verbose=True):
    transform = optimus.load(transform_file, param_file)
    stash = biggie.Stash(stash_file)
    futil.create_directory(os.path.split(output_file)[0])
    output = biggie.Stash(output_file)
    util.process_stash(stash, transform, output, input_key, verbose=verbose)
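All of these routines are meant to be driven from the command line. A minimal sketch of the argparse entry point that might sit below the function above; the flag wiring is an assumption for illustration, not code from the project:

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description="Apply a transform to a stash.")
    parser.add_argument("stash_file", help="Path to the input biggie stash.")
    parser.add_argument("input_key", help="Field of each entity to transform.")
    parser.add_argument("transform_file", help="Optimus graph definition file.")
    parser.add_argument("param_file", help="Parameter archive for the graph.")
    parser.add_argument("output_file", help="Path for the transformed stash.")
    args = parser.parse_args()
    main(args.stash_file, args.input_key, args.transform_file,
         args.param_file, args.output_file)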
Code Example #2
def main(args):
    stash = biggie.Stash(args.input_file)
    futils.create_directory(path.split(args.output_file)[0])
    stash_out = biggie.Stash(args.output_file)
    total_count = len(stash)
    pool_args = ['cqt', [2, 4, 8, 16, 32, 64, 128], 0, 'mean']
    for idx, key in enumerate(stash.keys()):
        stash_out.add(key, pool_entity(stash.get(key), *pool_args))
        print "[%s] %12d / %12d: %s" % (time.asctime(), idx, total_count, key)

    stash_out.close()
Code Example #3
def main(args):
    dset = biggie.Stash(args.input_file)
    futils.create_directory(path.split(args.output_file)[0])
    dout = biggie.Stash(args.output_file)
    beat_times = json.load(open(args.beat_times))
    total_count = len(dset)
    for idx, key in enumerate(dset.keys()):
        boundaries = subdivide_boundaries(beat_times[key], args.subdivide)
        dout.add(
            key, beat_sync(dset.get(key), boundaries,
                           pool_func=args.pool_func))
        print "[%s] %12d / %12d: %s" % (time.asctime(), idx, total_count, key)

    dout.close()
Code Example #4
File: wrap_stash.py Project: ejhumphrey/harmonic-cnn
def main(args):
    in_stash = biggie.Stash(args.data_file)
    utils.create_directory(os.path.dirname(args.output_file))
    if os.path.exists(args.output_file):
        os.remove(args.output_file)

    out_stash = biggie.Stash(args.output_file)
    STATUS['total'] = len(in_stash)
    for idx, key in enumerate(in_stash.keys()):
        new_entity = wrap_cqt_for_key(in_stash.get(key), args.length,
                                      args.stride)
        out_stash.add(key, new_entity)

    out_stash.close()
Code Example #5
File: wrap_cqts.py Project: saifrahmed/dl4mir
def main(args):
    in_stash = biggie.Stash(args.data_file)
    futils.create_directory(os.path.split(args.output_file)[0])
    if os.path.exists(args.output_file):
        os.remove(args.output_file)

    out_stash = biggie.Stash(args.output_file)
    total_count = len(in_stash.keys())
    for idx, key in enumerate(in_stash.keys()):
        out_stash.add(key,
                      wrap_entity(in_stash.get(key), args.length, args.stride))
        print "[%s] %12d / %12d: %s" % (time.asctime(), idx, total_count, key)

    out_stash.close()
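Examples #2 through #5 share the same read-transform-write shape over a biggie.Stash. A self-contained sketch of that round trip, using only the Stash calls seen above (the transform and the field names are hypothetical):

import biggie

def map_stash(input_file, output_file, transform):
    # Open the source stash and create a fresh destination stash.
    stash_in = biggie.Stash(input_file)
    stash_out = biggie.Stash(output_file)
    for key in stash_in.keys():
        # Fetch each entity, apply the caller's transform, store the result.
        stash_out.add(key, transform(stash_in.get(key)))
    # Writes are buffered; close to flush everything to disk.
    stash_out.close()

# e.g. copy entities unchanged by rebuilding them from their field dict:
# map_stash('in.hdf5', 'out.hdf5', lambda e: biggie.Entity(**e.values()))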
Code Example #6
def main(args):
    trainer, predictor = models.MODELS[args.model_name]()
    time_dim = trainer.inputs['data'].shape[2]

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

    print "Opening %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    stream = D.create_chord_index_stream(stash,
                                         time_dim,
                                         VOCAB,
                                         sample_func=D.slice_chroma_entity,
                                         working_size=25)

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.output_directory)

    hyperparams = dict(learning_rate=LEARNING_RATE, dropout=DROPOUT)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
Code Example #7
def main(args):
    trainer, predictor = models.MODELS[args.model_name]()
    time_dim = trainer.inputs['data'].shape[2]

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

    print "Opening %s" % args.training_file
    stash = biggie.Stash(args.training_file, cache=True)

    stream = S.minibatch(D.create_target_stream(
        stash,
        time_dim,
        max_pitch_shift=0,
        bins_per_pitch=1,
        mapper=D.FX.note_numbers_to_pitch,
        sample_func=D.slice_note_entity),
                         batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.output_directory)

    hyperparams = dict(learning_rate=LEARNING_RATE, dropout=DROPOUT)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
Code Example #8
File: driver.py Project: saifrahmed/dl4mir
def main(args):
    sim_margin = -RADIUS * args.margin
    trainer, predictor, zerofilter = models.iX_c3f2_oY(20, 3, 'xlarge')
    time_dim = trainer.inputs['cqt'].shape[2]

    if args.init_param_file:
        print("Loading parameters: {0}".format(args.init_param_file))
        trainer.load_param_values(args.init_param_file)

    print("Opening {0}".format(args.training_file))
    stash = biggie.Stash(args.training_file, cache=True)
    stream = S.minibatch(
        D.create_pairwise_stream(stash, time_dim,
                                 working_size=100, threshold=0.05),
        batch_size=BATCH_SIZE)

    stream = D.batch_filter(
        stream, zerofilter, threshold=2.0**-16, min_batch=1,
        max_consecutive_skips=100, sim_margin=sim_margin, diff_margin=RADIUS)

    print("Starting '{0}'".format(args.trial_name))
    driver = optimus.Driver(
        graph=trainer,
        name=args.trial_name,
        output_directory=futil.create_directory(args.output_directory))

    hyperparams = dict(
        learning_rate=LEARNING_RATE,
        sim_margin=sim_margin, diff_margin=RADIUS)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
Code Example #9
def main(args):
    trainer, predictor = models.MODELS[args.model_name]()
    time_dim = trainer.inputs['data'].shape[2]

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

    print "Opening %s" % args.training_file
    stash = biggie.Stash(args.training_file, cache=True)
    stream = D.create_chord_index_stream(stash,
                                         time_dim,
                                         max_pitch_shift=0,
                                         lexicon=VOCAB)

    # Load prior
    stat_file = "%s.json" % path.splitext(args.training_file)[0]
    prior = np.array(json.load(open(stat_file))['prior'], dtype=float)
    trainer.nodes['prior'].weight.value = 1.0 / prior.reshape(1, -1)

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.output_directory)

    hyperparams = dict(learning_rate=LEARNING_RATE, dropout=DROPOUT)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
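The prior sidecar read above is a plain JSON file holding a 'prior' array next to the training stash; Example #18 below writes exactly such a file with D.class_prior_v157. A toy sketch of producing a compatible file, assuming the prior is simply a normalized count vector (the counts are made up):

import json
import numpy as np

counts = np.array([1200, 450, 800], dtype=float)  # toy per-class frame counts
stats = dict(prior=(counts / counts.sum()).tolist())
with open("train_stash.json", 'w') as fp:  # matches the "<training_file>.json" naming
    json.dump(stats, fp)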
Code Example #10
def main(args):
    trainer, predictor = models.MODELS[args.model_name]()
    time_dim = trainer.inputs['cqt'].shape[2]

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

    print "Opening %s" % args.training_file
    stash = biggie.Stash(args.training_file, cache=True)
    stream = D.create_uniform_chord_stream(
        stash,
        time_dim,
        pitch_shift=0,
        vocab_dim=VOCAB,
        working_size=5,
    )

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.output_directory)

    hyperparams = dict(learning_rate=LEARNING_RATE)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
Code Example #11
def main(args):
    config = json.load(open(args.config))
    penalty_values = [float(_) for _ in config['penalty_values']]

    label_map = ENCODERS[args.label_type]
    for f in futils.load_textlist(args.posterior_filelist):
        print "[{0}] Decoding {1}".format(time.asctime(), f)
        # Read the whole stash to memory because the hdf5 reference doesn't
        #   survive parallelization.
        stash = biggie.Stash(f)
        keys = stash.keys()
        stash = {k: biggie.Entity(**stash.get(k).values()) for k in keys}

        # Parse the posterior stash filepath for its model's params
        parts = list(os.path.splitext(f)[0].split('outputs/')[-1].split('/'))
        if len(parts) == 4:
            parts.append("best")
        model, dropout, fold_idx, split, checkpoint = parts
        model_params = dict(model=model,
                            dropout=dropout,
                            fold_idx=fold_idx,
                            split=split,
                            checkpoint=checkpoint)

        output_dir = os.path.join(args.output_directory, checkpoint)
        posterior_stash_to_jams(stash, penalty_values, output_dir, label_map,
                                model_params)
Code Example #12
File: pcalda.py Project: saifrahmed/dl4mir
def main(args):
    predictor = pca_lda_graph(20, args.n_components, 3)
    input_shape = list(predictor.inputs['cqt'].shape)
    time_dim = input_shape[2]
    input_shape[0] = args.num_points

    print("Opening {0}".format(args.training_file))
    stash = biggie.Stash(args.training_file, cache=True)
    stream = D.create_labeled_stream(stash,
                                     time_dim,
                                     working_size=1000,
                                     threshold=0.05)

    print("Starting '{0}'".format(args.trial_name))
    data, labels = np.zeros(input_shape), []
    for idx, x in enumerate(stream):
        data[idx, ...] = x.cqt
        labels.append(x.label)
        if len(labels) == args.num_points:
            break
        elif (len(labels) % PRINT_FREQ) == 0:
            print("[{0}] {1:5} / {2:5}"
                  "".format(time.asctime(), len(labels), args.num_points))

    predictor.param_values = fit_params(data, labels, args.n_components, 3)
    output_directory = futil.create_directory(args.output_directory)
    predictor_file = path.join(output_directory, args.predictor_file)
    param_file = predictor_file.replace(".json", ".npz")
    optimus.save(predictor, def_file=predictor_file, param_file=param_file)
Code Example #13
def main(args):
    """Main routine for importing data."""
    futils.create_directory(path.split(args.output_file)[0])
    if args.verbose:
        print "[%s] Creating: %s" % (time.asctime(), args.output_file)
    stash = biggie.Stash(args.output_file)
    populate_stash(futils.load_textlist(args.key_list), args.cqt_directory,
                   args.lab_directory, stash, np.float32)
Code Example #14
def main(args):
    stash = biggie.Stash(args.posterior_file)
    thresh = [0.02] * 5 + [0.0] * 9
    partition_labels = util.partition(stash, likelihood_threshold, thresh)
    partition_labels = dict([(k, v.tolist())
                             for k, v in partition_labels.items()])
    with open(args.partition_file, 'w') as fp:
        json.dump(partition_labels, fp)
Code Example #15
def main(args):
    fpath = os.path.join(args.data_directory, "{0}.hdf5")
    train = biggie.Stash(fpath.format('train'), cache=True)
    valid = biggie.Stash(fpath.format('valid'), cache=True)
    test = biggie.Stash(fpath.format('test'), cache=True)
    results = classify(train,
                       valid,
                       test,
                       num_train=50000,
                       num_valid=10000,
                       num_test=25000)

    for k in 'train', 'valid', 'test':
        print("{0}: {1:.4}".format(k, results['{0}_score'.format(k)]))

    output_dir = os.path.split(args.stats_file)[0]
    futil.create_directory(output_dir)
    with open(args.stats_file, 'w') as fp:
        json.dump(results, fp, indent=2)
Code Example #16
def main(args):
    dset = biggie.Stash(args.input_file)
    labseg = json.load(open(args.labseg))
    out_dir = futils.create_directory(args.output_directory)
    total_count = len(dset)
    for idx, key in enumerate(dset.keys()):
        out_file = path.join(out_dir, "%s.%s" % (key, FILE_EXT))
        mdict = entity_to_mdict(dset.get(key), labseg[key])
        MLAB.savemat(out_file, mdict=mdict)
        print "[%s] %12d / %12d: %s" % (time.asctime(), idx, total_count, key)
Code Example #17
File: validation_sweep.py Project: saifrahmed/dl4mir
def main(args):
    param_files = futils.load_textlist(args.param_textlist)
    param_files.sort()
    param_files = param_files[args.start_index::args.stride]

    transform = optimus.load(args.transform_file)
    stash = biggie.Stash(args.validation_file, cache=True)
    output_dir = futils.create_directory(args.output_dir)

    for fidx, param_file in enumerate(param_files):
        transform.load_param_values(param_file)
        output_file = params_to_output_file(param_file, output_dir)
        futils.create_directory(os.path.split(output_file)[0])
        if os.path.exists(output_file):
            os.remove(output_file)

        output = biggie.Stash(output_file)
        util.process_stash(stash, transform, output,
                           args.field, verbose=args.verbose)
Code Example #18
def main(args):
    stash = biggie.Stash(args.input_file)
    futil.create_directory(path.split(args.output_file)[0])

    stats = dict()
    vocab = lex.Strict(157)

    stats['prior'] = D.class_prior_v157(stash, vocab).tolist()

    with open(args.output_file, 'w') as fp:
        json.dump(stats, fp)
Code Example #19
def main(args):
    class_data = np.load(args.centers_file)
    pca, ann = build_model(centers=class_data['centers'],
                           class_idxs=class_data['chord_idx'],
                           n_neighbors=args.n_neighbors,
                           n_components=args.n_components)
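    # Parallel worker pool; note it is not referenced again in this snippet.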
    pool = joblib.Parallel(n_jobs=NUM_CPUS)
    stash = biggie.Stash(args.stash_file)
    predictions = predict_all(stash, pca, ann)
    with open(args.output_file, 'w') as fp:
        json.dump(predictions, fp, indent=2)
Code Example #20
File: wcqt_mse.py Project: saifrahmed/dl4mir
def main(args):
    trainer, predictor = build_model()

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

    optimus.random_init(trainer.params['layer3'].weights)
    optimus.random_init(trainer.params['layer3'].bias)

    # 3. Create Data
    print "Loading %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    stream = D.create_stash_stream(stash,
                                   TIME_DIM,
                                   pitch_shift=0,
                                   vocab_dim=VOCAB,
                                   pool_size=25)

    if args.secondary_source:
        stash2 = biggie.Stash(args.secondary_source)
        stream2 = D.create_uniform_chord_stream(stash2,
                                                TIME_DIM,
                                                pitch_shift=0,
                                                vocab_dim=VOCAB,
                                                working_size=5)
        stream = S.mux([stream, stream2], [0.5, 0.5])

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.model_directory)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    hyperparams = dict(learning_rate=LEARNING_RATE)
    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
Code Example #21
def main(args):
    """Main routine for importing data."""
    data_splits = json.load(open(args.split_file))

    output_file_fmt = path.join(args.output_directory, FILE_FMT)
    for fold in data_splits:
        for split in data_splits[fold]:
            output_file = output_file_fmt % (fold, split)
            futils.create_directory(path.split(output_file)[0])
            if args.verbose:
                print "[%s] Creating: %s" % (time.asctime(), output_file)
            stash = biggie.Stash(output_file)
            populate_stash(data_splits[fold][split], args.cqt_directory,
                           args.jams_directory, stash, np.float32)
Code Example #22
def main(args):
    stash = biggie.Stash(args.training_file)
    valid_idx = range(157)
    streams, chord_idx = chord_streamer(stash,
                                        win_length=20,
                                        working_size=4,
                                        valid_idx=valid_idx,
                                        n_samples=None,
                                        batch_size=50)

    quants = multi_fit(streams, args.n_clusters, args.n_iter)
    centers = np.array([q.cluster_centers_ for q in quants])
    counts = np.array([q.cluster_sizes_ for q in quants], dtype=int)
    np.savez(args.output_file,
             centers=centers,
             counts=counts,
             chord_idx=chord_idx)
Code Example #23
File: file_importer.py Project: saifrahmed/dl4mir
def main(args):
    """Main routine for importing data."""
    partitions = json.load(open(args.split_file))

    output_file_fmt = path.join(args.output_directory, FILE_FMT)
    for set_name, subset in partitions.items():
        for fold_idx, splits in subset.items():
            for split, keys in splits.items():
                output_file = output_file_fmt.format(subset=set_name,
                                                     fold_idx=fold_idx,
                                                     split=split)
                futil.create_directory(path.split(output_file)[0])
                if args.verbose:
                    print("[{0}] Creating: {1}"
                          "".format(time.asctime(), output_file))
                stash = biggie.Stash(output_file)
                populate_stash(keys, args.cqt_directory, stash, np.float32)
                stash.close()
Code Example #24
def main(args):
    if not os.path.exists(args.posterior_file):
        print "File does not exist: %s" % args.posterior_file
        return
    dset = biggie.Stash(args.posterior_file)
    stats = json.load(open(args.validation_file))
    penalty = float(stats['best_config']['penalty'])

    estimations = dict()
    for idx, key in enumerate(dset.keys()):
        estimations[key] = estimate_classes(
            dset.get(key), util.viterbi, penalty=penalty)
        print "[%s] %12d / %12d: %s" % (time.asctime(), idx, len(dset), key)

    futil.create_directory(os.path.split(args.estimation_file)[0])
    with open(args.estimation_file, 'w') as fp:
        json.dump(estimations, fp, indent=2)

Code Example #25
def main(args):
    # 1.1 Create Inputs
    input_data = optimus.Input(name='cqt', shape=(None, 1, TIME_DIM, 252))

    chord_idx = optimus.Input(name='chord_idx', shape=(None, ), dtype='int32')

    learning_rate = optimus.Input(name='learning_rate', shape=None)

    margin = optimus.Input(name='margin', shape=None)

    margin_weight = optimus.Input(name='margin_weight', shape=None)

    nll_weight = optimus.Input(name='nll_weight', shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(name='layer0',
                            input_shape=input_data.shape,
                            weight_shape=(12, 1, 9, 19),
                            pool_shape=(1, 3),
                            act_type='relu')

    layer1 = optimus.Conv3D(name='layer1',
                            input_shape=layer0.output.shape,
                            weight_shape=(16, None, 7, 15),
                            act_type='relu')

    layer2 = optimus.Conv3D(name='layer2',
                            input_shape=layer1.output.shape,
                            weight_shape=(20, None, 6, 15),
                            act_type='relu')

    layer3 = optimus.Affine(name='layer3',
                            input_shape=layer2.output.shape,
                            output_shape=(None, 512),
                            act_type='relu')

    chord_classifier = optimus.Affine(name='chord_classifier',
                                      input_shape=layer3.output.shape,
                                      output_shape=(None, VOCAB),
                                      act_type='sigmoid')

    all_nodes = [layer0, layer1, layer2, layer3, chord_classifier]

    # 1.1 Create Losses
    nll = optimus.NegativeLogLikelihood(name="nll", weighted=True)

    likelihood_margin = optimus.LikelihoodMargin(name="likelihood_margin",
                                                 mode='l1',
                                                 weighted=True)

    # likelihood_margin = optimus.NLLMargin(
    #     name="likelihood_margin",
    #     mode='l2',
    #     weighted=True)

    # 2. Define Edges
    trainer_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, likelihood_margin.likelihood),
        (chord_idx, likelihood_margin.target_idx),
        (margin, likelihood_margin.margin),
        (margin_weight, likelihood_margin.weight),
        (chord_classifier.output, nll.likelihood), (chord_idx, nll.target_idx),
        (nll_weight, nll.weight)
    ])

    update_manager = optimus.ConnectionManager([
        (learning_rate, layer0.weights), (learning_rate, layer0.bias),
        (learning_rate, layer1.weights), (learning_rate, layer1.bias),
        (learning_rate, layer2.weights), (learning_rate, layer2.bias),
        (learning_rate, layer3.weights), (learning_rate, layer3.bias),
        (learning_rate, chord_classifier.weights),
        (learning_rate, chord_classifier.bias)
    ])

    trainer = optimus.Graph(name=GRAPH_NAME,
                            inputs=[
                                input_data, chord_idx, margin, learning_rate,
                                margin_weight, nll_weight
                            ],
                            nodes=all_nodes,
                            connections=trainer_edges.connections,
                            outputs=[optimus.Graph.TOTAL_LOSS],
                            losses=[nll, likelihood_margin],
                            updates=update_manager.connections)

    optimus.random_init(chord_classifier.weights)
    optimus.random_init(chord_classifier.bias)

    validator = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data, chord_idx, margin, margin_weight, nll_weight],
        nodes=all_nodes,
        connections=trainer_edges.connections,
        outputs=[optimus.Graph.TOTAL_LOSS],
        losses=[nll, likelihood_margin])

    posterior = optimus.Output(name='posterior')

    predictor_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, posterior)
    ])

    predictor = optimus.Graph(name=GRAPH_NAME,
                              inputs=[input_data],
                              nodes=all_nodes,
                              connections=predictor_edges.connections,
                              outputs=[posterior])

    # 3. Create Data
    stash = biggie.Stash(args.training_file)
    stream = S.minibatch(D.create_uniform_quality_stream(stash,
                                                         TIME_DIM,
                                                         vocab_dim=VOCAB),
                         batch_size=50)

    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.model_directory)

    hyperparams = {
        learning_rate.name: LEARNING_RATE,
        margin_weight.name: MARGIN_WEIGHT,
        nll_weight.name: NLL_WEIGHT,
        margin.name: MARGIN
    }

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)

    validator_file = path.join(driver.output_directory, args.validator_file)
    optimus.save(validator, def_file=validator_file)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)
Code Example #26
File: cqt_l2norm_nll.py Project: saifrahmed/dl4mir
def main(args):
    # 1.1 Create Inputs
    input_data = optimus.Input(name='cqt',
                               shape=(None, 1, TIME_DIM, PITCH_DIM))

    chord_idx = optimus.Input(name='chord_idx', shape=(None, ), dtype='int32')

    learning_rate = optimus.Input(name='learning_rate', shape=None)

    # 1.2 Create Nodes
    input_scalar = optimus.Normalize(name='input_scalar',
                                     mode='l2',
                                     scale_factor=50.0)

    layer0 = optimus.Conv3D(name='layer0',
                            input_shape=input_data.shape,
                            weight_shape=(32, 1, 5, 19),
                            pool_shape=(2, 3),
                            act_type='relu')

    layer1 = optimus.Conv3D(name='layer1',
                            input_shape=layer0.output.shape,
                            weight_shape=(64, None, 5, 15),
                            act_type='relu')

    layer2 = optimus.Conv3D(name='layer2',
                            input_shape=layer1.output.shape,
                            weight_shape=(128, None, 3, 15),
                            act_type='relu')

    layer3 = optimus.Affine(name='layer3',
                            input_shape=layer2.output.shape,
                            output_shape=(
                                None,
                                1024,
                            ),
                            act_type='relu')

    chord_classifier = optimus.Softmax(name='chord_classifier',
                                       input_shape=layer3.output.shape,
                                       n_out=VOCAB,
                                       act_type='linear')

    all_nodes = [
        input_scalar, layer0, layer1, layer2, layer3, chord_classifier
    ]

    # 1.1 Create Losses
    chord_nll = optimus.NegativeLogLikelihood(name="chord_nll")

    # 2. Define Edges
    trainer_edges = optimus.ConnectionManager([
        (input_data, input_scalar.input), (input_scalar.output, layer0.input),
        (layer0.output, layer1.input), (layer1.output, layer2.input),
        (layer2.output, layer3.input), (layer3.output, chord_classifier.input),
        (chord_classifier.output, chord_nll.likelihood),
        (chord_idx, chord_nll.target_idx)
    ])

    update_manager = optimus.ConnectionManager([
        (learning_rate, layer0.weights), (learning_rate, layer0.bias),
        (learning_rate, layer1.weights), (learning_rate, layer1.bias),
        (learning_rate, layer2.weights), (learning_rate, layer2.bias),
        (learning_rate, layer3.weights), (learning_rate, layer3.bias),
        (learning_rate, chord_classifier.weights),
        (learning_rate, chord_classifier.bias)
    ])

    trainer = optimus.Graph(name=GRAPH_NAME,
                            inputs=[input_data, chord_idx, learning_rate],
                            nodes=all_nodes,
                            connections=trainer_edges.connections,
                            outputs=[optimus.Graph.TOTAL_LOSS],
                            losses=[chord_nll],
                            updates=update_manager.connections)

    for n in all_nodes[1:]:
        optimus.random_init(n.weights)
        optimus.random_init(n.bias)

    if args.init_param_file:
        param_values = dict(np.load(args.init_param_file))
        keys = param_values.keys()
        for key in keys:
            if chord_classifier.name in key or layer3.name in key:
                print "skipping %s" % key
                del param_values[key]
        trainer.param_values = param_values

    posterior = optimus.Output(name='posterior')

    predictor_edges = optimus.ConnectionManager([
        (input_data, input_scalar.input), (input_scalar.output, layer0.input),
        (layer0.output, layer1.input), (layer1.output, layer2.input),
        (layer2.output, layer3.input), (layer3.output, chord_classifier.input),
        (chord_classifier.output, posterior)
    ])

    predictor = optimus.Graph(name=GRAPH_NAME,
                              inputs=[input_data],
                              nodes=all_nodes,
                              connections=predictor_edges.connections,
                              outputs=[posterior])

    # 3. Create Data
    print "Loading %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    stream = S.minibatch(D.create_uniform_chord_stream(stash,
                                                       TIME_DIM,
                                                       pitch_shift=0,
                                                       vocab_dim=VOCAB,
                                                       working_size=10),
                         batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.model_directory)

    hyperparams = {learning_rate.name: LEARNING_RATE}

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
Code Example #27
def main(args):
    # 1.1 Create Inputs
    input_data = optimus.Input(name='cqt', shape=(None, 6, TIME_DIM, 40))

    target = optimus.Input(name='target', shape=(None, VOCAB))

    learning_rate = optimus.Input(name='learning_rate', shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(name='layer0',
                            input_shape=input_data.shape,
                            weight_shape=(32, None, 5, 5),
                            pool_shape=(2, 3),
                            act_type='relu')

    layer1 = optimus.Conv3D(name='layer1',
                            input_shape=layer0.output.shape,
                            weight_shape=(64, None, 5, 7),
                            act_type='relu')

    layer2 = optimus.Conv3D(name='layer2',
                            input_shape=layer1.output.shape,
                            weight_shape=(128, None, 3, 6),
                            act_type='relu')

    layer3 = optimus.Affine(name='layer3',
                            input_shape=layer2.output.shape,
                            output_shape=(
                                None,
                                1024,
                            ),
                            act_type='relu')

    chord_estimator = optimus.Affine(name='chord_estimator',
                                     input_shape=layer3.output.shape,
                                     output_shape=(
                                         None,
                                         VOCAB,
                                     ),
                                     act_type='sigmoid')

    all_nodes = [layer0, layer1, layer2, layer3, chord_estimator]

    # 1.1 Create Losses
    chord_xentropy = optimus.CrossEntropy(name="chord_xentropy")

    # 2. Define Edges
    trainer_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_estimator.input),
        (chord_estimator.output, chord_xentropy.prediction),
        (target, chord_xentropy.target)
    ])

    update_manager = optimus.ConnectionManager([
        (learning_rate, layer0.weights), (learning_rate, layer0.bias),
        (learning_rate, layer1.weights), (learning_rate, layer1.bias),
        (learning_rate, layer2.weights), (learning_rate, layer2.bias),
        (learning_rate, layer3.weights), (learning_rate, layer3.bias),
        (learning_rate, chord_estimator.weights),
        (learning_rate, chord_estimator.bias)
    ])

    trainer = optimus.Graph(name=GRAPH_NAME,
                            inputs=[input_data, target, learning_rate],
                            nodes=all_nodes,
                            connections=trainer_edges.connections,
                            outputs=[optimus.Graph.TOTAL_LOSS],
                            losses=[chord_xentropy],
                            updates=update_manager.connections,
                            momentum=None)

    for n in all_nodes:
        optimus.random_init(n.weights, 0, 0.01)
        optimus.random_init(n.bias, 0, 0.01)

    validator = optimus.Graph(name=GRAPH_NAME,
                              inputs=[input_data, target],
                              nodes=all_nodes,
                              connections=trainer_edges.connections,
                              outputs=[optimus.Graph.TOTAL_LOSS],
                              losses=[chord_xentropy])

    posterior = optimus.Output(name='posterior')

    predictor_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_estimator.input),
        (chord_estimator.output, posterior)
    ])

    predictor = optimus.Graph(name=GRAPH_NAME,
                              inputs=[input_data],
                              nodes=all_nodes,
                              connections=predictor_edges.connections,
                              outputs=[posterior])

    # 3. Create Data
    print "Loading %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    # partition_labels = json.load(
    #     open("/home/ejhumphrey/Dropbox/tmp/train0_v2_merged_partition.json"))
    stream = D.create_uniform_chord_stream(stash,
                                           TIME_DIM,
                                           pitch_shift=False,
                                           vocab_dim=VOCAB,
                                           working_size=5)
    stream = S.minibatch(FX.chord_index_to_affinity_vectors(
        FX.wrap_cqt(stream, length=40, stride=36), VOCAB),
                         batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.model_directory)

    hyperparams = {learning_rate.name: LEARNING_RATE}

    validator_file = path.join(driver.output_directory, args.validator_file)
    optimus.save(validator, def_file=validator_file)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
Code Example #28
def main(args):
    # 1.1 Create Inputs
    input_data = optimus.Input(name='cqt',
                               shape=(None, 1, TIME_DIM, PITCH_DIM))

    chord_idx = optimus.Input(name='chord_idx', shape=(None, ), dtype='int32')

    learning_rate = optimus.Input(name='learning_rate', shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(name='layer0',
                            input_shape=input_data.shape,
                            weight_shape=(32, 1, 5, 19),
                            pool_shape=(2, 3),
                            act_type='relu')

    layer1 = optimus.Conv3D(name='layer1',
                            input_shape=layer0.output.shape,
                            weight_shape=(64, None, 5, 15),
                            act_type='relu')

    layer2 = optimus.Conv3D(name='layer2',
                            input_shape=layer1.output.shape,
                            weight_shape=(128, None, 3, 15),
                            act_type='relu')

    layer3 = optimus.Affine(name='layer3',
                            input_shape=layer2.output.shape,
                            output_shape=(
                                None,
                                1024,
                            ),
                            act_type='relu')

    chord_classifier = optimus.Softmax(name='chord_classifier',
                                       input_shape=layer3.output.shape,
                                       n_out=VOCAB,
                                       act_type='linear')

    all_nodes = [layer0, layer1, layer2, layer3, chord_classifier]

    # 1.1 Create Losses
    chord_mce = optimus.ClassificationError(name="chord_mce")

    # 2. Define Edges
    trainer_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, chord_mce.prediction),
        (chord_idx, chord_mce.target_idx)
    ])

    update_manager = optimus.ConnectionManager([
        (learning_rate, layer0.weights), (learning_rate, layer0.bias),
        (learning_rate, layer1.weights), (learning_rate, layer1.bias),
        (learning_rate, layer2.weights), (learning_rate, layer2.bias),
        (learning_rate, layer3.weights), (learning_rate, layer3.bias),
        (learning_rate, chord_classifier.weights),
        (learning_rate, chord_classifier.bias)
    ])

    trainer = optimus.Graph(name=GRAPH_NAME,
                            inputs=[input_data, chord_idx, learning_rate],
                            nodes=all_nodes,
                            connections=trainer_edges.connections,
                            outputs=[optimus.Graph.TOTAL_LOSS],
                            losses=[chord_mce],
                            updates=update_manager.connections)

    for n in all_nodes:
        optimus.random_init(n.weights)
        optimus.random_init(n.bias)

    if args.init_param_file:
        trainer.load_param_values(args.init_param_file)

    posterior = optimus.Output(name='posterior')

    predictor_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, posterior)
    ])

    predictor = optimus.Graph(name=GRAPH_NAME,
                              inputs=[input_data],
                              nodes=all_nodes,
                              connections=predictor_edges.connections,
                              outputs=[posterior])

    # 3. Create Data
    print "Loading %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    synth_stash = biggie.Stash(args.secondary_source)
    stream = D.muxed_uniform_chord_stream(stash,
                                          synth_stash,
                                          TIME_DIM,
                                          pitch_shift=0,
                                          vocab_dim=VOCAB,
                                          working_size=10)
    # stream = D.create_uniform_chord_stream(
    #     stash, TIME_DIM, pitch_shift=0, vocab_dim=VOCAB, working_size=10)

    # if args.secondary_source:
    #     print "Loading %s" % args.secondary_source
    #     stash2 = biggie.Stash(args.secondary_source)
    #     stream2 = D.create_uniform_chord_stream(
    #         stash2, TIME_DIM, pitch_shift=0, vocab_dim=VOCAB, working_size=5)
    #     stream = S.mux([stream, stream2], [0.5, 0.5])

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.model_directory)

    hyperparams = {learning_rate.name: LEARNING_RATE}

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
Code Example #29
def main(args):
    # 1.1 Create Inputs
    input_data = optimus.Input(
        name='cqt',
        shape=(None, 1, TIME_DIM, 252))

    chord_idx = optimus.Input(
        name='chord_idx',
        shape=(None,),
        dtype='int32')

    learning_rate = optimus.Input(
        name='learning_rate',
        shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(
        name='layer0',
        input_shape=input_data.shape,
        weight_shape=(12, 1, 9, 19),
        pool_shape=(1, 3),
        act_type='relu')

    layer1 = optimus.Conv3D(
        name='layer1',
        input_shape=layer0.output.shape,
        weight_shape=(16, None, 7, 15),
        act_type='relu')

    layer2 = optimus.Conv3D(
        name='layer2',
        input_shape=layer1.output.shape,
        weight_shape=(20, None, 6, 15),
        act_type='relu')

    layer3 = optimus.Affine(
        name='layer3',
        input_shape=layer2.output.shape,
        output_shape=(None, 512,),
        act_type='relu')

    chord_classifier = optimus.Softmax(
        name='chord_classifier',
        input_shape=layer3.output.shape,
        n_out=VOCAB,
        act_type='linear')

    all_nodes = [layer0, layer1, layer2, layer3, chord_classifier]

    # 1.1 Create Losses
    chord_nll = optimus.NegativeLogLikelihood(
        name="chord_nll")

    # 2. Define Edges
    trainer_edges = optimus.ConnectionManager([
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, chord_nll.likelihood),
        (chord_idx, chord_nll.target_idx)])

    update_manager = optimus.ConnectionManager([
        (learning_rate, layer0.weights),
        (learning_rate, layer0.bias),
        (learning_rate, layer1.weights),
        (learning_rate, layer1.bias),
        (learning_rate, layer2.weights),
        (learning_rate, layer2.bias),
        (learning_rate, layer3.weights),
        (learning_rate, layer3.bias),
        (learning_rate, chord_classifier.weights),
        (learning_rate, chord_classifier.bias)])

    print "Building trainer"
    trainer = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data, chord_idx, learning_rate],
        nodes=all_nodes,
        connections=trainer_edges.connections,
        outputs=[optimus.Graph.TOTAL_LOSS],
        losses=[chord_nll],
        updates=update_manager.connections)

    optimus.random_init(chord_classifier.weights)

    print "Building validator"
    validator = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data, chord_idx],
        nodes=all_nodes,
        connections=trainer_edges.connections,
        outputs=[optimus.Graph.TOTAL_LOSS],
        losses=[chord_nll])

    posterior = optimus.Output(
        name='posterior')

    predictor_edges = optimus.ConnectionManager([
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, posterior)])

    print "Building predictor"
    predictor = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data],
        nodes=all_nodes,
        connections=predictor_edges.connections,
        outputs=[posterior])

    # 3. Create Data
    print "Opening Data"
    stash = biggie.Stash(args.training_file)
    stream = S.minibatch(
        D.create_uniform_quality_stream(stash, TIME_DIM),
        batch_size=50,
        functions=[FX.pitch_shift(), FX.map_to_chord_index(VOCAB)])

    driver = optimus.Driver(
        graph=trainer,
        name=args.trial_name,
        output_directory=args.model_directory)

    hyperparams = {learning_rate.name: LEARNING_RATE}

    print "...aaand we're off!"
    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)

    validator_file = path.join(driver.output_directory, args.validator_file)
    optimus.save(validator, def_file=validator_file)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)
Code Example #30
def main(args):
    # 1.1 Create Inputs
    input_data = optimus.Input(name='cqt', shape=(None, 1, TIME_DIM, 252))

    chord_idx = optimus.Input(name='chord_idx', shape=(None, ), dtype='int32')

    is_chord = optimus.Input(name='is_chord', shape=(None, ))

    learning_rate = optimus.Input(name='learning_rate', shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(name='layer0',
                            input_shape=input_data.shape,
                            weight_shape=(12, 1, 9, 19),
                            pool_shape=(1, 3),
                            act_type='relu')

    layer1 = optimus.Conv3D(name='layer1',
                            input_shape=layer0.output.shape,
                            weight_shape=(16, None, 7, 15),
                            act_type='relu')

    layer2 = optimus.Conv3D(name='layer2',
                            input_shape=layer1.output.shape,
                            weight_shape=(20, None, 6, 15),
                            act_type='relu')

    layer3 = optimus.Affine(name='layer3',
                            input_shape=layer2.output.shape,
                            output_shape=(
                                None,
                                512,
                            ),
                            act_type='relu')

    chord_estimator = optimus.Affine(name='chord_estimator',
                                     input_shape=layer3.output.shape,
                                     output_shape=(None, VOCAB),
                                     act_type='sigmoid')

    all_nodes = [layer0, layer1, layer2, layer3, chord_estimator]

    # 1.1 Create Losses
    chord_mse = optimus.SparseMeanSquaredError(
        # chord_mse = optimus.SparseCrossEntropy(
        name="chord_mse")

    # 2. Define Edges
    trainer_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_estimator.input),
        (chord_estimator.output, chord_mse.prediction),
        (chord_idx, chord_mse.index), (is_chord, chord_mse.target)
    ])

    update_manager = optimus.ConnectionManager([
        (learning_rate, layer0.weights), (learning_rate, layer0.bias),
        (learning_rate, layer1.weights), (learning_rate, layer1.bias),
        (learning_rate, layer2.weights), (learning_rate, layer2.bias),
        (learning_rate, layer3.weights), (learning_rate, layer3.bias),
        (learning_rate, chord_estimator.weights),
        (learning_rate, chord_estimator.bias)
    ])

    print "Building trainer"
    trainer = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data, chord_idx, is_chord, learning_rate],
        nodes=all_nodes,
        connections=trainer_edges.connections,
        outputs=[optimus.Graph.TOTAL_LOSS],
        losses=[chord_mse],
        updates=update_manager.connections)

    optimus.random_init(chord_estimator.weights)

    print "Building validator"
    validator = optimus.Graph(name=GRAPH_NAME,
                              inputs=[input_data, chord_idx, is_chord],
                              nodes=all_nodes,
                              connections=trainer_edges.connections,
                              outputs=[optimus.Graph.TOTAL_LOSS],
                              losses=[chord_mse])

    posterior = optimus.Output(name='posterior')

    predictor_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_estimator.input),
        (chord_estimator.output, posterior)
    ])

    print "Building predictor"
    predictor = optimus.Graph(name=GRAPH_NAME,
                              inputs=[input_data],
                              nodes=all_nodes,
                              connections=predictor_edges.connections,
                              outputs=[posterior])

    # 3. Create Data
    print "Opening Data"
    stash = biggie.Stash(args.training_file)
    stream = S.minibatch(D.create_contrastive_quality_stream(stash,
                                                             TIME_DIM,
                                                             vocab_dim=VOCAB),
                         batch_size=50)

    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.model_directory)

    hyperparams = {learning_rate.name: LEARNING_RATE}

    print "...aaand we're off!"
    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)

    validator_file = path.join(driver.output_directory, args.validator_file)
    optimus.save(validator, def_file=validator_file)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)
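Examples #25 through #30 all follow the same optimus recipe: declare Inputs, build Nodes, wire a trainer graph and a parallel predictor graph over the same nodes, then fit with a Driver. A condensed sketch of that skeleton, using only constructs that appear above; the shapes, names, and constants are placeholders, not values from the project:

import optimus

# Inputs.
input_data = optimus.Input(name='cqt', shape=(None, 1, 20, 252))
chord_idx = optimus.Input(name='chord_idx', shape=(None,), dtype='int32')
learning_rate = optimus.Input(name='learning_rate', shape=None)

# Nodes: one conv layer, one affine layer, and a softmax classifier.
layer0 = optimus.Conv3D(name='layer0', input_shape=input_data.shape,
                        weight_shape=(12, 1, 9, 19), pool_shape=(1, 3),
                        act_type='relu')
layer1 = optimus.Affine(name='layer1', input_shape=layer0.output.shape,
                        output_shape=(None, 512), act_type='relu')
classifier = optimus.Softmax(name='classifier',
                             input_shape=layer1.output.shape,
                             n_out=157, act_type='linear')
all_nodes = [layer0, layer1, classifier]

# Loss.
nll = optimus.NegativeLogLikelihood(name='nll')

# Trainer graph: data flows through the stack into the loss, and the
# learning rate is routed to every parameter for the updates.
trainer_edges = optimus.ConnectionManager([
    (input_data, layer0.input), (layer0.output, layer1.input),
    (layer1.output, classifier.input),
    (classifier.output, nll.likelihood), (chord_idx, nll.target_idx)])
update_manager = optimus.ConnectionManager(
    [(learning_rate, n.weights) for n in all_nodes] +
    [(learning_rate, n.bias) for n in all_nodes])
trainer = optimus.Graph(name='sketch',
                        inputs=[input_data, chord_idx, learning_rate],
                        nodes=all_nodes,
                        connections=trainer_edges.connections,
                        outputs=[optimus.Graph.TOTAL_LOSS],
                        losses=[nll],
                        updates=update_manager.connections)
for n in all_nodes:
    optimus.random_init(n.weights)
    optimus.random_init(n.bias)

# Predictor graph: the same nodes, rewired to emit the class posterior.
posterior = optimus.Output(name='posterior')
predictor_edges = optimus.ConnectionManager([
    (input_data, layer0.input), (layer0.output, layer1.input),
    (layer1.output, classifier.input), (classifier.output, posterior)])
predictor = optimus.Graph(name='sketch',
                          inputs=[input_data],
                          nodes=all_nodes,
                          connections=predictor_edges.connections,
                          outputs=[posterior])

# Training then mirrors the examples above:
#   driver = optimus.Driver(graph=trainer, name='trial_0',
#                           output_directory='outputs')
#   driver.fit(stream, hyperparams={learning_rate.name: 0.02})
#   optimus.save(predictor, def_file='predictor.json')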