import numpy as np

import biggie
import optimus

# Assumed module-level context from the surrounding codebase: `GRAPH_NAME`,
# `classifier_init`, the label utilities `L`, and the convolve helper `C`
# are defined or imported elsewhere in these modules.


def pca_lda_graph(n_in=20, n_components=256, n_out=3):
    """Build a linear PCA -> LDA embedding graph over log-scaled CQT patches."""
    input_data = optimus.Input(
        name='cqt',
        shape=(None, 1, n_in, 192))

    reshape = optimus.Flatten(name='flat', ndim=2)
    logscale = optimus.Log("logscale", 1.0)
    pca = optimus.CenteredAffine(
        name='pca',
        input_shape=(None, n_in * 192),
        output_shape=(None, n_components),
        act_type='linear')
    lda = optimus.CenteredAffine(
        name='lda',
        input_shape=(None, n_components),
        output_shape=(None, n_out),
        act_type='linear')

    embedding = optimus.Output(name='embedding')

    base_edges = [
        (input_data, reshape.input),
        (reshape.output, logscale.input),
        (logscale.output, pca.input),
        (pca.output, lda.input),
        (lda.output, embedding)]

    predictor = optimus.Graph(
        name='pca-lda',
        inputs=[input_data],
        nodes=[logscale, reshape, pca, lda],
        connections=optimus.ConnectionManager(base_edges).connections,
        outputs=[embedding],
        verbose=False)

    return predictor
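
# Hypothetical usage sketch. Assumptions: optimus graphs are callable with
# keyword arguments named after their Inputs and return outputs keyed by
# Output name -- verify against the local optimus version:
#
#   predictor = pca_lda_graph(n_in=20, n_components=256, n_out=3)
#   cqt = np.random.normal(size=(2, 1, 20, 192)).astype(np.float32)
#   embedding = predictor(cqt=cqt)['embedding']  # expected shape: (2, 3)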

def test_convolve(self):
    # Build a trivial graph that flattens its input to a 1-D vector.
    input_data = optimus.Input(name='x_in', shape=(None, 1, 1, 1))
    flatten = optimus.Flatten(name='flatten', ndim=1)
    output_data = optimus.Output(name='x_out')

    edges = optimus.ConnectionManager([
        (input_data, flatten.input),
        (flatten.output, output_data)])

    transform = optimus.Graph(
        name='test',
        inputs=[input_data],
        nodes=[flatten],
        connections=edges.connections,
        outputs=[output_data])

    # Convolve the graph over the entity's `x_in` field; the unrelated
    # field `y` should pass through untouched.
    x = np.arange(10).reshape(1, 10, 1)
    y = np.array(['a', 'b'])
    entity = biggie.Entity(x_in=x, y=y)
    z = C.convolve(entity, transform, 'x_in')
    np.testing.assert_equal(z.x_out, np.arange(10))
    np.testing.assert_equal(z.y, y)
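
# What the test above pins down about the helper: C.convolve applies the
# graph to the named field of a biggie.Entity ('x_in'), attaches the graph's
# outputs to the result (z.x_out), and copies non-transformed fields through
# unchanged (z.y). Anything beyond that is not asserted here.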

def i8c3_pwmse(size='large'):
    k0, k1, k2 = dict(
        small=(8, 16, 20),
        med=(12, 24, 32),
        large=(16, 32, 48))[size]

    # 1.1 Create Inputs
    input_data = optimus.Input(
        name='cqt',
        shape=(None, 1, 8, 252))

    target = optimus.Input(
        name='target',
        shape=(None, 1))

    chord_idx = optimus.Input(
        name='chord_idx',
        shape=(None,),
        dtype='int32')

    learning_rate = optimus.Input(
        name='learning_rate',
        shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(
        name='layer0',
        input_shape=input_data.shape,
        weight_shape=(k0, None, 3, 13),
        pool_shape=(1, 3),
        act_type='relu')

    layer1 = optimus.Conv3D(
        name='layer1',
        input_shape=layer0.output.shape,
        weight_shape=(k1, None, 3, 37),
        act_type='relu')

    layer2 = optimus.Conv3D(
        name='layer2',
        input_shape=layer1.output.shape,
        weight_shape=(k2, None, 3, 33),
        act_type='relu')

    chord_classifier = optimus.Conv3D(
        name='chord_classifier',
        input_shape=layer2.output.shape,
        weight_shape=(13, None, 2, 1),
        act_type='sigmoid')

    flatten = optimus.Flatten('flatten', 2)

    null_classifier = optimus.Affine(
        name='null_classifier',
        input_shape=layer2.output.shape,
        output_shape=(None, 1),
        act_type='sigmoid')

    cat = optimus.Concatenate('concatenate', num_inputs=2, axis=1)

    param_nodes = [layer0, layer1, layer2, chord_classifier, null_classifier]
    misc_nodes = [flatten, cat]

    # 1.3 Create Loss
    likelihoods = optimus.SelectIndex(name='likelihoods')
    dimshuffle = optimus.Dimshuffle('dimshuffle', (0, 'x'))
    squared_error = optimus.SquaredEuclidean(name='squared_error')
    loss = optimus.Mean(name='mean_squared_error')
    loss_nodes = [likelihoods, dimshuffle, squared_error, loss]

    # 2. Define Edges
    base_edges = [
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, chord_classifier.input),
        (layer2.output, null_classifier.input),
        (chord_classifier.output, flatten.input),
        (flatten.output, cat.input_0),
        (null_classifier.output, cat.input_1)]

    trainer_edges = optimus.ConnectionManager(
        base_edges + [
            (cat.output, likelihoods.input),
            (chord_idx, likelihoods.index),
            (likelihoods.output, dimshuffle.input),
            (dimshuffle.output, squared_error.input_a),
            (target, squared_error.input_b),
            (squared_error.output, loss.input)])

    # Route the learning rate to every parameter; list comprehensions, not
    # map(), so the concatenation also works under Python 3.
    update_manager = optimus.ConnectionManager(
        [(learning_rate, n.weights) for n in param_nodes] +
        [(learning_rate, n.bias) for n in param_nodes])

    trainer = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data, target, chord_idx, learning_rate],
        nodes=param_nodes + misc_nodes + loss_nodes,
        connections=trainer_edges.connections,
        outputs=[loss.output],
        loss=loss.output,
        updates=update_manager.connections,
        verbose=True)

    classifier_init(param_nodes)

    posterior = optimus.Output(name='posterior')

    predictor_edges = optimus.ConnectionManager(
        base_edges + [(cat.output, posterior)])

    predictor = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data],
        nodes=param_nodes + misc_nodes,
        connections=predictor_edges.connections,
        outputs=[posterior])

    return trainer, predictor
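
# Hedged sketch of one training step with the pair returned above; batch
# values are illustrative, and the keyword-call convention (returning the
# graph's outputs) is an assumption about this optimus version:
#
#   trainer, predictor = i8c3_pwmse(size='large')
#   loss = trainer(
#       cqt=np.zeros([50, 1, 8, 252], dtype=np.float32),
#       target=np.ones([50, 1], dtype=np.float32),
#       chord_idx=np.zeros([50], dtype=np.int32),
#       learning_rate=0.02)  # one SGD step over all parameter nodes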

def i8x1a3T_nll2(size, use_dropout=False):
    k0, k1, k2 = dict(
        large=(2048, 2048, 40),)[size]

    # 1.1 Create Inputs
    input_data = optimus.Input(
        name='data',
        shape=(None, 8, 1, 252))

    chord_idx = optimus.Input(
        name='class_idx',
        shape=(None,),
        dtype='int32')

    learning_rate = optimus.Input(
        name='learning_rate',
        shape=None)

    inputs = [input_data, chord_idx, learning_rate]

    dropout = optimus.Input(
        name='dropout',
        shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Affine(
        name='layer0',
        input_shape=input_data.shape,
        output_shape=(None, k0),
        act_type='relu')

    layer1 = optimus.Affine(
        name='layer1',
        input_shape=layer0.output.shape,
        output_shape=(None, k1),
        act_type='relu')

    layer2 = optimus.Affine(
        name='layer2',
        input_shape=layer1.output.shape,
        output_shape=(None, k2, 1, 12),
        act_type='relu')

    # Dropout is wired in only for the trainer, and only when requested.
    dropout_edges = []
    if use_dropout:
        layer0.enable_dropout()
        layer1.enable_dropout()
        layer2.enable_dropout()
        inputs += [dropout]
        dropout_edges += [(dropout, layer0.dropout),
                          (dropout, layer1.dropout),
                          (dropout, layer2.dropout)]

    chord_classifier = optimus.Conv3D(
        name='chord_classifier',
        input_shape=layer2.output.shape,
        weight_shape=(13, None, 1, 1),
        act_type='linear')

    flatten = optimus.Flatten('flatten', 2)

    null_classifier = optimus.Affine(
        name='null_classifier',
        input_shape=layer0.output.shape,
        output_shape=(None, 1),
        act_type='linear')

    cat = optimus.Concatenate('concatenate', num_inputs=2, axis=1)
    softmax = optimus.Softmax('softmax')
    prior = optimus.Multiply("prior", weight_shape=(1, 157), broadcast=[0])
    prior.weight.value = np.ones([1, 157])

    param_nodes = [layer0, layer1, layer2, null_classifier, chord_classifier]
    misc_nodes = [flatten, cat, softmax, prior]

    # 1.3 Create Loss
    likelihoods = optimus.SelectIndex(name='likelihoods')
    log = optimus.Log(name='log')
    neg = optimus.Multiply(name='gain', weight_shape=None)
    neg.weight.value = -1.0
    loss = optimus.Mean(name='negative_log_likelihood')
    loss_nodes = [likelihoods, log, neg, loss]
    total_loss = optimus.Output(name='total_loss')
    posterior = optimus.Output(name='posterior')

    # 2. Define Edges
    base_edges = [
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, chord_classifier.input),
        (layer0.output, null_classifier.input),
        (chord_classifier.output, flatten.input),
        (flatten.output, cat.input_0),
        (null_classifier.output, cat.input_1),
        (cat.output, softmax.input),
        (softmax.output, prior.input),
        (prior.output, posterior)]

    trainer_edges = optimus.ConnectionManager(
        base_edges + dropout_edges + [
            (softmax.output, likelihoods.input),
            (chord_idx, likelihoods.index),
            (likelihoods.output, log.input),
            (log.output, neg.input),
            (neg.output, loss.input),
            (loss.output, total_loss)])

    update_manager = optimus.ConnectionManager(
        [(learning_rate, n.weights) for n in param_nodes] +
        [(learning_rate, n.bias) for n in param_nodes])

    classifier_init(param_nodes)

    trainer = optimus.Graph(
        name=GRAPH_NAME,
        inputs=inputs,
        nodes=param_nodes + misc_nodes + loss_nodes,
        connections=trainer_edges.connections,
        outputs=[total_loss, posterior],
        loss=total_loss,
        updates=update_manager.connections,
        verbose=True)

    if use_dropout:
        layer0.disable_dropout()
        layer1.disable_dropout()
        layer2.disable_dropout()

    predictor = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data],
        nodes=param_nodes + misc_nodes,
        connections=optimus.ConnectionManager(base_edges).connections,
        outputs=[posterior],
        verbose=True)

    return trainer, predictor
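
# Sketch of the dropout plumbing above: with use_dropout=True the trainer
# gains a fourth Input named 'dropout' wired to each Affine layer, while the
# predictor never sees it (dropout is disabled again before it is built).
# The call style is the same assumed convention as in the earlier sketches:
#
#   trainer, predictor = i8x1a3T_nll2('large', use_dropout=True)
#   outputs = trainer(
#       data=np.zeros([50, 8, 1, 252], dtype=np.float32),
#       class_idx=np.zeros([50], dtype=np.int32),
#       learning_rate=0.02,
#       dropout=0.5)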

def i8c4b10_nll_dropout(size='large'):
    k0, k1, k2 = dict(
        large=(24, 48, 64))[size]

    # 1.1 Create Inputs
    input_data = optimus.Input(
        name='cqt',
        shape=(None, 1, 8, 252))

    chord_idx = optimus.Input(
        name='chord_idx',
        shape=(None,),
        dtype='int32')

    learning_rate = optimus.Input(
        name='learning_rate',
        shape=None)

    dropout = optimus.Input(
        name='dropout',
        shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(
        name='layer0',
        input_shape=input_data.shape,
        weight_shape=(k0, None, 3, 13),
        pool_shape=(1, 3),
        act_type='relu')

    layer1 = optimus.Conv3D(
        name='layer1',
        input_shape=layer0.output.shape,
        weight_shape=(k1, None, 3, 37),
        act_type='relu')

    layer2 = optimus.Conv3D(
        name='layer2',
        input_shape=layer1.output.shape,
        weight_shape=(k2, None, 3, 33),
        act_type='relu')

    layer3 = optimus.Conv3D(
        name='layer3',
        input_shape=layer2.output.shape,
        weight_shape=(10, None, 2, 1),
        act_type='relu')

    chord_classifier = optimus.Conv3D(
        name='chord_classifier',
        input_shape=layer3.output.shape,
        weight_shape=(13, None, 1, 1),
        act_type='linear')

    flatten = optimus.Flatten('flatten', 2)

    null_classifier = optimus.Affine(
        name='null_classifier',
        input_shape=layer3.output.shape,
        output_shape=(None, 1),
        act_type='linear')

    cat = optimus.Concatenate('concatenate', num_inputs=2, axis=1)
    softmax = optimus.Softmax('softmax')

    param_nodes = [layer0, layer1, layer2, layer3,
                   null_classifier, chord_classifier]
    misc_nodes = [flatten, cat, softmax]

    # 1.3 Create Loss
    likelihoods = optimus.SelectIndex(name='likelihoods')
    log = optimus.Log(name='log')
    neg = optimus.Gain(name='gain')
    neg.weight.value = -1.0
    loss = optimus.Mean(name='negative_log_likelihood')
    loss_nodes = [likelihoods, log, neg, loss]
    total_loss = optimus.Output(name='total_loss')

    layer0.enable_dropout()
    layer1.enable_dropout()
    layer2.enable_dropout()

    # 2. Define Edges
    base_edges = [
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, layer3.input),
        (layer3.output, chord_classifier.input),
        (layer3.output, null_classifier.input),
        (chord_classifier.output, flatten.input),
        (flatten.output, cat.input_0),
        (null_classifier.output, cat.input_1),
        (cat.output, softmax.input)]

    trainer_edges = optimus.ConnectionManager(
        base_edges + [
            (dropout, layer0.dropout),
            (dropout, layer1.dropout),
            (dropout, layer2.dropout),
            (softmax.output, likelihoods.input),
            (chord_idx, likelihoods.index),
            (likelihoods.output, log.input),
            (log.output, neg.input),
            (neg.output, loss.input),
            (loss.output, total_loss)])

    # Exclude the final chord classifier from SGD updates and random init;
    # its weights are set below from a fixed semitone template.
    update_manager = optimus.ConnectionManager(
        [(learning_rate, n.weights) for n in param_nodes[:-1]] +
        [(learning_rate, n.bias) for n in param_nodes[:-1]])

    trainer = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data, chord_idx, learning_rate, dropout],
        nodes=param_nodes + misc_nodes + loss_nodes,
        connections=trainer_edges.connections,
        outputs=[total_loss],
        loss=total_loss,
        updates=update_manager.connections,
        verbose=True)

    classifier_init(param_nodes[:-1])

    semitones = L.semitone_matrix(157)[:13, 2:]
    chord_classifier.weights.value = semitones.reshape(13, 10, 1, 1)

    posterior = optimus.Output(name='posterior')

    predictor_edges = optimus.ConnectionManager(
        base_edges + [(softmax.output, posterior)])

    layer0.disable_dropout()
    layer1.disable_dropout()
    layer2.disable_dropout()

    predictor = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data],
        nodes=param_nodes + misc_nodes,
        connections=predictor_edges.connections,
        outputs=[posterior])

    return trainer, predictor

def i20c3_mse12(size='large'):
    k0, k1, k2 = dict(
        small=(10, 20, 40),
        med=(12, 24, 48),
        large=(16, 32, 64))[size]

    # 1.1 Create Inputs
    input_data = optimus.Input(
        name='cqt',
        shape=(None, 1, 20, 252))

    target = optimus.Input(
        name='target',
        shape=(None, 12))

    learning_rate = optimus.Input(
        name='learning_rate',
        shape=None)

    # 1.2 Create Nodes
    layer0 = optimus.Conv3D(
        name='layer0',
        input_shape=input_data.shape,
        weight_shape=(k0, None, 5, 13),
        pool_shape=(2, 3),
        act_type='relu')

    layer1 = optimus.Conv3D(
        name='layer1',
        input_shape=layer0.output.shape,
        weight_shape=(k1, None, 5, 37),
        pool_shape=(2, 1),
        act_type='relu')

    layer2 = optimus.Conv3D(
        name='layer2',
        input_shape=layer1.output.shape,
        weight_shape=(k2, None, 1, 33),
        pool_shape=(2, 1),
        act_type='relu')

    chroma_estimator = optimus.Conv3D(
        name='chord_classifier',
        input_shape=layer2.output.shape,
        weight_shape=(1, None, 1, 1),
        act_type='sigmoid')

    flatten = optimus.Flatten('flatten', 2)

    param_nodes = [layer0, layer1, layer2, chroma_estimator]
    misc_nodes = [flatten]

    # 1.3 Create Loss
    error = optimus.SquaredEuclidean(name='squared_error')
    loss = optimus.Mean(name='mean_squared_error')
    loss_nodes = [error, loss]
    chroma = optimus.Output(name='chroma')
    total_loss = optimus.Output(name='total_loss')

    # 2. Define Edges
    base_edges = [
        (input_data, layer0.input),
        (layer0.output, layer1.input),
        (layer1.output, layer2.input),
        (layer2.output, chroma_estimator.input),
        (chroma_estimator.output, flatten.input),
        (flatten.output, chroma)]

    trainer_edges = optimus.ConnectionManager(
        base_edges + [
            (flatten.output, error.input_a),
            (target, error.input_b),
            (error.output, loss.input),
            (loss.output, total_loss)])

    update_manager = optimus.ConnectionManager(
        [(learning_rate, n.weights) for n in param_nodes] +
        [(learning_rate, n.bias) for n in param_nodes])

    classifier_init(param_nodes)

    trainer = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data, target, learning_rate],
        nodes=param_nodes + misc_nodes + loss_nodes,
        connections=trainer_edges.connections,
        outputs=[total_loss, chroma],
        loss=total_loss,
        updates=update_manager.connections,
        verbose=True)

    predictor = optimus.Graph(
        name=GRAPH_NAME,
        inputs=[input_data],
        nodes=param_nodes + misc_nodes,
        connections=optimus.ConnectionManager(base_edges).connections,
        outputs=[chroma])

    return trainer, predictor
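
# Chroma prediction sketch with the graph above (same assumed call style as
# the earlier sketches; the patch shape matches the 'cqt' Input declared in
# this builder, and the output flattens to 12 chroma bins per frame):
#
#   trainer, predictor = i20c3_mse12('large')
#   chroma = predictor(cqt=np.zeros([4, 1, 20, 252], dtype=np.float32))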