Example #1
def main(args):
    trainer, predictor = build_model()

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

    optimus.random_init(trainer.params['layer3'].weights)
    optimus.random_init(trainer.params['layer3'].bias)

    # 3. Create Data
    print "Loading %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    stream = D.create_stash_stream(
        stash, TIME_DIM, pitch_shift=0, vocab_dim=VOCAB, pool_size=25)

    if args.secondary_source:
        stash2 = biggie.Stash(args.secondary_source)
        stream2 = D.create_uniform_chord_stream(
            stash2, TIME_DIM, pitch_shift=0, vocab_dim=VOCAB, working_size=5)
        stream = S.mux([stream, stream2], [0.5, 0.5])

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(
        graph=trainer,
        name=args.trial_name,
        output_directory=args.model_directory)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    hyperparams = dict(learning_rate=LEARNING_RATE)
    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
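
The main(args) routines in these examples only read attributes off an args namespace: init_param_file, training_file, secondary_source, trial_name, model_directory, and predictor_file. A minimal sketch of a command-line entry point that supplies them, assuming argparse and hypothetical flag names chosen only to mirror those attributes:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Train a chord model.")
    # Required values consumed unconditionally by main(args).
    parser.add_argument("training_file",
                        help="Path to a biggie Stash of training data.")
    parser.add_argument("trial_name",
                        help="Unique name for this training run.")
    parser.add_argument("model_directory",
                        help="Directory to write model output.")
    parser.add_argument("predictor_file",
                        help="Filename for the saved predictor definition.")
    # Optional values; main(args) only checks these for truthiness.
    parser.add_argument("--init_param_file", default="",
                        help="Optional parameter archive to warm-start from.")
    parser.add_argument("--secondary_source", default="",
                        help="Optional second Stash muxed 50/50 with the primary stream.")
    main(parser.parse_args())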
Example #2
def main(args):
    trainer, predictor = build_model()

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

    optimus.random_init(trainer.params['layer3'].weights)
    optimus.random_init(trainer.params['layer3'].bias)

    # 3. Create Data
    print "Loading %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    stream = D.create_stash_stream(stash,
                                   TIME_DIM,
                                   pitch_shift=0,
                                   vocab_dim=VOCAB,
                                   pool_size=25)

    if args.secondary_source:
        stash2 = biggie.Stash(args.secondary_source)
        stream2 = D.create_uniform_chord_stream(stash2,
                                                TIME_DIM,
                                                pitch_shift=0,
                                                vocab_dim=VOCAB,
                                                working_size=5)
        stream = S.mux([stream, stream2], [0.5, 0.5])

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.model_directory)

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    hyperparams = dict(learning_rate=LEARNING_RATE)
    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
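
S.minibatch(stream, batch_size=BATCH_SIZE) collects the stream of single training samples into batches of BATCH_SIZE before they reach the driver. Purely as an illustration of that behavior (not the actual S.minibatch implementation), a minibatcher over a stream of sample dicts can be sketched as:

import numpy as np

def minibatch_sketch(sample_stream, batch_size):
    """Group a stream of sample dicts into dicts of stacked batch arrays."""
    buf = []
    for sample in sample_stream:  # each sample is e.g. {'cqt': array, 'chord_idx': int}
        buf.append(sample)
        if len(buf) == batch_size:
            yield {key: np.asarray([s[key] for s in buf]) for key in buf[0]}
            buf = []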
Example #3
def main(args):
    # 1.1 Create Inputs
    input_data = optimus.Input(
        name='cqt',
        shape=(None, OCTAVE_DIM, TIME_DIM, PITCH_DIM))

    chord_idx = optimus.Input(
        name='chord_idx',
        shape=(None,),
        dtype='int32')

    learning_rate = optimus.Input(
        name='learning_rate',
        shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(
        name='layer0',
        input_shape=input_data.shape,
        weight_shape=(32, None, 5, 5),
        pool_shape=(2, 3),
        act_type='relu')

    layer1 = optimus.Conv3D(
        name='layer1',
        input_shape=layer0.output.shape,
        weight_shape=(64, None, 5, 7),
        act_type='relu')

    layer2 = optimus.Conv3D(
        name='layer2',
        input_shape=layer1.output.shape,
        weight_shape=(128, None, 3, 6),
        act_type='relu')

    layer3 = optimus.Affine(
        name='layer3',
        input_shape=layer2.output.shape,
        output_shape=(None, 1024,),
        act_type='relu')

    chord_classifier = optimus.Softmax(
        name='chord_classifier',
        input_shape=layer3.output.shape,
        n_out=VOCAB,
        act_type='linear')

    all_nodes = [layer0, layer1, layer2, layer3, chord_classifier]

    # 1.3 Create Losses
    chord_nll = optimus.NegativeLogLikelihood(
        name="chord_nll")

    # 2. Define Edges
    trainer_edges = optimus.ConnectionManager([
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, chord_nll.likelihood),
        (chord_idx, chord_nll.target_idx)])

    update_manager = optimus.ConnectionManager([
        (learning_rate, layer0.weights),
        (learning_rate, layer0.bias),
        (learning_rate, layer1.weights),
        (learning_rate, layer1.bias),
        (learning_rate, layer2.weights),
        (learning_rate, layer2.bias),
        (learning_rate, layer3.weights),
        (learning_rate, layer3.bias),
        (learning_rate, chord_classifier.weights),
        (learning_rate, chord_classifier.bias)])

    trainer = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data, chord_idx, learning_rate],
        nodes=all_nodes,
        connections=trainer_edges.connections,
        outputs=[optimus.Graph.TOTAL_LOSS],
        losses=[chord_nll],
        updates=update_manager.connections)

    for n in all_nodes:
        optimus.random_init(n.weights)
        optimus.random_init(n.bias)

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

    posterior = optimus.Output(
        name='posterior')

    predictor_edges = optimus.ConnectionManager([
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, posterior)])

    predictor = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data],
        nodes=all_nodes,
        connections=predictor_edges.connections,
        outputs=[posterior])

    # 3. Create Data
    print "Loading %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    stream = D.create_uniform_chord_stream(
        stash, TIME_DIM, pitch_shift=0, vocab_dim=VOCAB, working_size=10,
        valid_idx=range(60) + [156])

    if args.secondary_source:
        stash2 = biggie.Stash(args.secondary_source)
        stream2 = D.create_uniform_chord_stream(
            stash2, TIME_DIM, pitch_shift=0, vocab_dim=VOCAB, working_size=5)
        stream = S.mux([stream, stream2], [0.5, 0.5])

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(
        graph=trainer,
        name=args.trial_name,
        output_directory=args.model_directory)

    hyperparams = {learning_rate.name: LEARNING_RATE}

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
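
In the trainer above, chord_classifier.output (a softmax posterior over VOCAB classes) feeds chord_nll.likelihood, and the integer targets chord_idx feed chord_nll.target_idx. Under the standard definition of negative log-likelihood, and assuming a mean reduction over the batch, that loss is the average of -log p[target]; a NumPy sketch of the computation (illustrative only, not the optimus internals):

import numpy as np

def chord_nll_sketch(posterior, chord_idx):
    """posterior: (batch, VOCAB) softmax output; chord_idx: (batch,) integer targets."""
    likelihoods = posterior[np.arange(len(chord_idx)), chord_idx]
    return float(np.mean(-np.log(likelihoods)))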
Example #4
def main(args):
    # 1.1 Create Inputs
    input_data = optimus.Input(name='cqt',
                               shape=(None, OCTAVE_DIM, TIME_DIM, PITCH_DIM))

    chord_idx = optimus.Input(name='chord_idx', shape=(None, ), dtype='int32')

    learning_rate = optimus.Input(name='learning_rate', shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(name='layer0',
                            input_shape=input_data.shape,
                            weight_shape=(32, None, 5, 5),
                            pool_shape=(2, 3),
                            act_type='relu')

    layer1 = optimus.Conv3D(name='layer1',
                            input_shape=layer0.output.shape,
                            weight_shape=(64, None, 5, 7),
                            act_type='relu')

    layer2 = optimus.Conv3D(name='layer2',
                            input_shape=layer1.output.shape,
                            weight_shape=(128, None, 3, 6),
                            act_type='relu')

    layer3 = optimus.Affine(name='layer3',
                            input_shape=layer2.output.shape,
                            output_shape=(None, 1024),
                            act_type='relu')

    chord_classifier = optimus.Softmax(name='chord_classifier',
                                       input_shape=layer3.output.shape,
                                       n_out=VOCAB,
                                       act_type='linear')

    all_nodes = [layer0, layer1, layer2, layer3, chord_classifier]

    # 1.3 Create Losses
    chord_nll = optimus.NegativeLogLikelihood(name="chord_nll")

    # 2. Define Edges
    trainer_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, chord_nll.likelihood),
        (chord_idx, chord_nll.target_idx)
    ])

    update_manager = optimus.ConnectionManager([
        (learning_rate, layer0.weights), (learning_rate, layer0.bias),
        (learning_rate, layer1.weights), (learning_rate, layer1.bias),
        (learning_rate, layer2.weights), (learning_rate, layer2.bias),
        (learning_rate, layer3.weights), (learning_rate, layer3.bias),
        (learning_rate, chord_classifier.weights),
        (learning_rate, chord_classifier.bias)
    ])

    trainer = optimus.Graph(name=GRAPH_NAME,
                            inputs=[input_data, chord_idx, learning_rate],
                            nodes=all_nodes,
                            connections=trainer_edges.connections,
                            outputs=[optimus.Graph.TOTAL_LOSS],
                            losses=[chord_nll],
                            updates=update_manager.connections)

    for n in all_nodes:
        optimus.random_init(n.weights)
        optimus.random_init(n.bias)

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

    posterior = optimus.Output(name='posterior')

    predictor_edges = optimus.ConnectionManager([
        (input_data, layer0.input), (layer0.output, layer1.input),
        (layer1.output, layer2.input), (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (chord_classifier.output, posterior)
    ])

    predictor = optimus.Graph(name=GRAPH_NAME,
                              inputs=[input_data],
                              nodes=all_nodes,
                              connections=predictor_edges.connections,
                              outputs=[posterior])

    # 3. Create Data
    print "Loading %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    stream = D.create_uniform_chord_stream(stash,
                                           TIME_DIM,
                                           pitch_shift=0,
                                           vocab_dim=VOCAB,
                                           working_size=4)

    if args.secondary_source:
        stash2 = biggie.Stash(args.secondary_source)
        stream2 = D.create_uniform_chord_stream(stash2,
                                                TIME_DIM,
                                                pitch_shift=0,
                                                vocab_dim=VOCAB,
                                                working_size=5)
        stream = S.mux([stream, stream2], [0.5, 0.5])

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(graph=trainer,
                            name=args.trial_name,
                            output_directory=args.model_directory)

    hyperparams = {learning_rate.name: LEARNING_RATE}

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
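
S.mux([stream, stream2], [0.5, 0.5]) interleaves the primary and secondary sample streams, drawing from each with equal probability. As an illustration of that behavior only (not the actual S.mux), a weighted multiplexer over generator streams could be sketched as:

import numpy as np

def mux_sketch(streams, weights, seed=12345):
    """Yield samples drawn from streams with the given selection probabilities."""
    rng = np.random.RandomState(seed)
    probs = np.asarray(weights, dtype=float) / np.sum(weights)
    while True:
        idx = rng.choice(len(streams), p=probs)
        yield next(streams[idx])

With equal weights, each draw comes from either stash with probability 0.5, so roughly half of every minibatch is backed by the secondary source.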
Example #5
def main(args):
    # 1.1 Create Inputs
    input_data = optimus.Input(
        name='cqt',
        shape=(None, OCTAVE_DIM, TIME_DIM, PITCH_DIM))

    chord_idx = optimus.Input(
        name='chord_idx',
        shape=(None,),
        dtype='int32')

    learning_rate = optimus.Input(
        name='learning_rate',
        shape=None)

    margin = optimus.Input(
        name='margin',
        shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(
        name='layer0',
        input_shape=input_data.shape,
        weight_shape=(32, None, 5, 5),
        pool_shape=(2, 3),
        act_type='relu')

    layer1 = optimus.Conv3D(
        name='layer1',
        input_shape=layer0.output.shape,
        weight_shape=(64, None, 5, 7),
        act_type='relu')

    layer2 = optimus.Conv3D(
        name='layer2',
        input_shape=layer1.output.shape,
        weight_shape=(128, None, 3, 6),
        act_type='relu')

    layer3 = optimus.Affine(
        name='layer3',
        input_shape=layer2.output.shape,
        output_shape=(None, 1024,),
        act_type='relu')

    chord_estimator = optimus.Softmax(
        name='chord_estimator',
        input_shape=layer3.output.shape,
        output_shape=(None, VOCAB,),
        act_type='sigmoid')

    all_nodes = [layer0, layer1, layer2, layer3, chord_estimator]

    log = optimus.Log(name='log')
    neg = optimus.Gain(name='gain')
    neg.weight = np.array(-1)

    energy = optimus.SelectIndex(name='selector')

    loss = optimus.Mean(name='total_loss')

    # 1.3 Create Losses
    chord_margin = optimus.Margin(
        name="chord_margin",
        mode='max')

    # 2. Define Edges
    trainer_edges = optimus.ConnectionManager([
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, layer3.input),
        (layer3.output, chord_estimator.input),
        (chord_estimator.output, log.input),
        (log.output, neg.input),
        (neg.output, energy.input),
        (chord_idx, energy.index),
        (energy.output, loss.input)])

    update_manager = optimus.ConnectionManager([
        (learning_rate, layer0.weights),
        (learning_rate, layer0.bias),
        (learning_rate, layer1.weights),
        (learning_rate, layer1.bias),
        (learning_rate, layer2.weights),
        (learning_rate, layer2.bias),
        (learning_rate, layer3.weights),
        (learning_rate, layer3.bias),
        (learning_rate, chord_estimator.weights),
        (learning_rate, chord_estimator.bias)])

    trainer = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data, chord_idx, learning_rate, margin],
        nodes=all_nodes,
        connections=trainer_edges.connections,
        outputs=[optimus.Graph.TOTAL_LOSS],
        losses=[loss],
        updates=update_manager.connections)

    for n in all_nodes:
        optimus.random_init(n.weights)
        optimus.random_init(n.bias)

    if args.init_param_file:
        print "Loading parameters: %s" % args.init_param_file
        trainer.load_param_values(args.init_param_file)

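    # Re-initialize the last two nodes (layer3 and chord_estimator) so that only
    # the earlier convolutional layers keep any parameter values loaded above.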
    for n in all_nodes[-2:]:
        optimus.random_init(n.weights)
        optimus.random_init(n.bias)

    posterior = optimus.Output(
        name='posterior')

    predictor_edges = optimus.ConnectionManager([
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, layer3.input),
        (layer3.output, chord_estimator.input),
        (chord_estimator.output, posterior)])

    predictor = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data],
        nodes=all_nodes,
        connections=predictor_edges.connections,
        outputs=[posterior])

    # 3. Create Data
    print "Loading %s" % args.training_file
    stash = biggie.Stash(args.training_file)
    stream = D.create_stash_stream(
        stash, TIME_DIM, pitch_shift=0, vocab_dim=VOCAB, pool_size=25)

    if args.secondary_source:
        stash2 = biggie.Stash(args.secondary_source)
        stream2 = D.create_uniform_chord_stream(
            stash2, TIME_DIM, pitch_shift=0, vocab_dim=VOCAB, working_size=5)
        stream = S.mux([stream, stream2], [0.5, 0.5])

    stream = S.minibatch(stream, batch_size=BATCH_SIZE)

    print "Starting '%s'" % args.trial_name
    driver = optimus.Driver(
        graph=trainer,
        name=args.trial_name,
        output_directory=args.model_directory)

    hyperparams = {learning_rate.name: LEARNING_RATE,
                   margin.name: MARGIN}

    predictor_file = path.join(driver.output_directory, args.predictor_file)
    optimus.save(predictor, def_file=predictor_file)

    driver.fit(stream, hyperparams=hyperparams, **DRIVER_ARGS)
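
Example #5 replaces the NegativeLogLikelihood node of Examples #3 and #4 with an explicit chain: the estimator output passes through log, is negated by a Gain with weight -1, indexed at the target chord by SelectIndex, and reduced by Mean. A NumPy sketch of the composed computation (not the optimus internals) shows that the chain assembles the same batch-mean negative log-likelihood by hand:

import numpy as np

def chained_loss_sketch(estimator_output, chord_idx):
    """estimator_output: (batch, VOCAB) activations; chord_idx: (batch,) integer targets."""
    log_out = np.log(estimator_output)                      # 'log' node
    neg_out = -1.0 * log_out                                # 'gain' node with weight -1
    energy = neg_out[np.arange(len(chord_idx)), chord_idx]  # 'selector' node
    return float(np.mean(energy))                           # 'total_loss' node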