    def orchestra_embedding(self, orch_past):
        conv_layer = self.weights["orch_embed_conv"]
        conv_layer_timeDist = TimeDistributed(
            conv_layer,
            input_shape=(self.temporal_order - 1, self.orch_dim, 1))
        with tf.name_scope("build_orch_input"):
            # Format as a 4D tensor: (batch, time, pitch, channel)
            input_seq = tf.reshape(
                orch_past, [-1, self.temporal_order - 1, self.orch_dim, 1])

        with tf.name_scope("conv_orch"):
            o0 = conv_layer_timeDist(input_seq)
            keras_layer_summary(conv_layer)

        # Conv1D (single filter, 'valid' padding) leaves orch_dim - kernel_size_orch + 1
        # positions; remove the trailing channel dimension of size 1
        cropped_orch_dim = self.orch_dim - self.kernel_size_orch + 1
        o0 = tf.reshape(o0, [-1, self.temporal_order - 1, cropped_orch_dim])

        with tf.name_scope("gru"):
            orchestra_embedding = stacked_rnn(
                o0,
                self.hs_orch,
                rnn_type='gru',
                weight_decay_coeff=self.weight_decay_coeff,
                dropout_probability=self.dropout_probability,
                activation='relu')
        return orchestra_embedding
    def predict(self, inputs_ph):

        piano_t, piano_past, piano_future, orch_past, _ = inputs_ph
        
        with tf.name_scope("piano_embedding"):
            piano_embedding_past, piano_embedding_fut = self.piano_embedding(piano_t, piano_past, piano_future)

        with tf.name_scope("orchestra_embedding"):
            orchestra_embedding = self.orchestra_embedding(orch_past)

        #####################
        # Concatenate and predict
        with tf.name_scope("top_level_prediction"):
            with tf.name_scope("layer_0"):
                top_input = keras.layers.concatenate([orchestra_embedding, piano_embedding_past, piano_embedding_fut], axis=1)
                dense_layer = Dense(1000, activation='relu', name='orch_pred_0')
                top_0 = dense_layer(top_input)
                keras_layer_summary(dense_layer, collections=['weights'])
            with tf.name_scope("layer_1"):
                dense_layer = Dense(self.orch_dim, activation='sigmoid', name='orch_pred')
                orch_prediction = dense_layer(top_0)
                keras_layer_summary(dense_layer, collections=['weights'])
        #####################

        return orch_prediction, top_input
    def predict(self, inputs_ph):

        piano_t, piano_past, piano_future, orch_past, _ = inputs_ph

        with tf.name_scope("piano_embedding"):
            piano_t_emb, piano_past_emb, piano_future_emb = self.piano_embedding(
                piano_t, piano_past, piano_future)

        with tf.name_scope("orchestra_embedding"):
            orchestra_emb = self.orchestra_embedding(orch_past)

        #####################
        # Concatenate and predict
        with tf.name_scope("top_level_prediction"):
            embedding_concat = tf.concat(
                [orchestra_emb, piano_past_emb, piano_future_emb, piano_t_emb],
                axis=1)
            if self.dropout_probability > 0:
                top_input_drop = Dropout(
                    self.dropout_probability)(embedding_concat)
            else:
                top_input_drop = embedding_concat
            dense_layer = Dense(self.orch_dim,
                                activation='sigmoid',
                                name='orch_pred')
            orch_prediction = dense_layer(top_input_drop)
            keras_layer_summary(dense_layer)
        #####################

        return orch_prediction, orch_prediction
Example #4
    def piano_embedding(self, piano_t, piano_seq, reverse):
        with tf.name_scope("build_piano_input"):
            # Add a time axis to piano_t
            piano_t_time = tf.reshape(piano_t, [-1, 1, self.piano_dim])
            # Concatenate t and future
            input_seq = tf.concat([piano_t_time, piano_seq], 1)
            if reverse:
                # Flip the matrix along the time axis so that the last time index is t
                input_seq = tf.reverse(input_seq, [1])
            # Format as a 4D tensor: (batch, time, pitch, channel)
            input_seq = tf.reshape(input_seq, [-1, self.temporal_order, self.piano_dim, 1])

        with tf.name_scope("conv_piano"):
            conv_layer = Conv1D(1, self.kernel_size_piano, activation='relu')
            conv_layer_timeDist = TimeDistributed(conv_layer, input_shape=(self.temporal_order, self.piano_dim, 1))
            p0 = conv_layer_timeDist(input_seq)
            keras_layer_summary(conv_layer)

        # Conv1D (single filter, 'valid' padding) leaves piano_dim - kernel_size_piano + 1
        # positions; remove the trailing channel dimension of size 1
        cropped_piano_dim = self.piano_dim - self.kernel_size_piano + 1
        p0 = tf.reshape(p0, [-1, self.temporal_order, cropped_piano_dim])

        with tf.name_scope("gru"):
            piano_emb = stacked_rnn(p0, self.hs_piano, 
                rnn_type='gru', 
                weight_decay_coeff=self.weight_decay_coeff,
                dropout_probability=self.dropout_probability, 
                activation='relu'
                )
        return piano_emb
Example #5
def MLP(x, layers, activation='relu'):
    for layer_ind, num_unit in enumerate(layers):
        with tf.variable_scope(str(layer_ind)):
            dense_layer = Dense(num_unit, activation=activation)
            x = dense_layer(x)
            keras_layer_summary(dense_layer)
    return x
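# A minimal usage sketch for the MLP helper above, assuming the same TF1-style graph
# construction as the surrounding snippets (tf, Dense and keras_layer_summary already
# imported). The placeholder shape and layer sizes are illustrative only.
def _mlp_usage_sketch():
    piano_dim = 93  # hypothetical input dimensionality
    x_ph = tf.placeholder(tf.float32, shape=(None, piano_dim), name="piano_t")
    with tf.variable_scope("piano_MLP"):
        h = MLP(x_ph, [500, 500], activation='relu')  # two hidden layers of 500 units
    return h  # tensor of shape (None, 500)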
    def predict(self, inputs_ph):

        piano_t, _, _, orch_past, _ = inputs_ph

        FiLM_Coeff = self.FiLM_generator(orch_past,
                                         self.weights['FiLM_generator'])

        # Unpack
        ind_start = 0
        ind_end = ind_start + self.FilM_dim_0
        beta_0 = FiLM_Coeff[:, ind_start:ind_end]
        ind_start = ind_end
        ind_end = ind_start + self.FilM_dim_0
        gamma_0 = FiLM_Coeff[:, ind_start:ind_end]
        ind_start = ind_end
        ind_end = ind_start + self.FilM_dim_1
        beta_1 = FiLM_Coeff[:, ind_start:ind_end]
        ind_start = ind_end
        ind_end = ind_start + self.FilM_dim_1
        gamma_1 = FiLM_Coeff[:, ind_start:ind_end]

        # Adapt input dimension
        first_layer = self.weights["first_layer"]
        x = first_layer(piano_t)
        keras_layer_summary(first_layer, collections=["weights"])

        x = self.residual_FiLM(x, gamma_0, beta_0, self.weights["block_0"])

        x = self.residual_FiLM(x, gamma_1, beta_1, self.weights["block_1"])

        last_layer = self.weights["last_layer"]
        orch_prediction = last_layer(x)
        keras_layer_summary(last_layer, collections=["weights"])

        return orch_prediction, orch_prediction
    def orchestra_embedding(self, orch_past):
        with tf.name_scope("past"):
            # Run the stacked GRU layers over the past orchestra sequence
            x = orch_past
            for layer_ind, gru_layer in enumerate(
                    self.weights["past_orchestra"]):
                with tf.name_scope("l_" + str(layer_ind)):
                    x = gru_layer(x)
                    keras_layer_summary(gru_layer)
        return x
    def predict(self, inputs_ph, pitch_mask):
        piano_t, _, _, orch_t, orch_past, orch_future = inputs_ph

        with tf.name_scope("orch_past"):
            x = orch_past
            for layer in self.weights["orch_past"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            orch_past_embedding = x

        with tf.name_scope("orch_future"):
            x = orch_future
            for layer in self.weights["orch_future"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            orch_future_embedding = x

        with tf.name_scope("present_piano"):
            x = piano_t
            for layer in self.weights["piano_present"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            piano_t_embedding = x

        with tf.name_scope("present_orchestra"):
            with tf.name_scope("build_input"):
                # pitch_mask_reshape = tf.reshape(pitch_mask, [1, self.orch_dim])
                pitch_mask_reshape = pitch_mask
                masked_orch_t = tf.multiply(orch_t, pitch_mask_reshape)
                x = tf.concat([masked_orch_t, pitch_mask_reshape], axis=1)
            for ind_layer, layer in enumerate(self.weights["orch_present"]):
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            orch_t_embedding = x

        with tf.name_scope("final_pred"):
            # Perhaps concatenate the mask ?
            x = tf.concat([
                orch_past_embedding, orch_future_embedding, piano_t_embedding,
                orch_t_embedding
            ],
                          axis=1)
            # for layer_ind in range(len(self.weights["final_pred"])-1):
            # 	layer = self.weights["final_pred"][layer_ind]
            # 	x = layer(x)
            # 	keras_layer_summary(layer, collections=["weights"])
            # W, b = self.weights["final_pred"][-1]
            # pred = tf.sigmoid(tf.matmul(x, W) + b)

            for layer in self.weights["final_pred"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            pred = x

        return pred, pred
Example #9
    def predict(self, inputs_ph, pitch_mask):
        piano_t, _, _, orch_t, orch_past, orch_future = inputs_ph

        with tf.name_scope("orch_past"):
            x = orch_past
            for layer in self.weights["orch_past"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            orch_past_embedding = x

        with tf.name_scope("orch_future"):
            x = orch_future
            for layer in self.weights["orch_future"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            orch_future_embedding = x

        with tf.name_scope("present_piano"):
            x = piano_t
            for layer in self.weights["piano_present"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            piano_t_embedding = x

        with tf.name_scope("present_orchestra"):
            for ind_layer, layer in enumerate(self.weights["orch_present"]):
                if ind_layer == 0:
                    # Remove pitch from orch and weight
                    W, b = layer
                    with tf.name_scope("masking_orch_t"):
                        orch_t_masked = tf.boolean_mask(orch_t,
                                                        pitch_mask,
                                                        axis=1)
                        W_masked = tf.boolean_mask(W, pitch_mask, axis=0)
                    # Mask out the column corresponding to the predicted pitch
                    x = tf.matmul(orch_t_masked, W_masked) + b
                else:
                    x = layer(x)
                    keras_layer_summary(layer, collections=["weights"])
            orch_t_embedding = x

        with tf.name_scope("final_pred"):
            # Perhaps concatenate the mask ?
            x = tf.concat([
                orch_past_embedding, orch_future_embedding, piano_t_embedding,
                orch_t_embedding
            ],
                          axis=1)
            for layer in self.weights["final_pred"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            pred = x

        # pred has size (batch, 1)
        return pred, pred
    def residual_FiLM(self, x, gamma, beta, layers):
        original = x  # NOTE: stored but never added back, so no residual connection is actually applied
        # Stacked MLPs
        for layer in layers:
            x = layer(x)
            keras_layer_summary(layer, collections=["weights"])
        # FiLM: feature-wise affine modulation
        x = tf.multiply(gamma, x) + beta
        # ReLU
        x = keras.activations.relu(x)
        return x
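# residual_FiLM above is feature-wise linear modulation (FiLM): each feature of x is
# rescaled by gamma and shifted by beta, both produced by a conditioning network
# (here, GRUs over the past orchestra). A minimal standalone sketch of just the
# modulation step, with made-up shapes; only tf is assumed to be imported as above.
def _film_modulation_sketch():
    batch, n_features = 2, 4  # illustrative sizes
    x = tf.placeholder(tf.float32, (batch, n_features))
    gamma = tf.placeholder(tf.float32, (batch, n_features))
    beta = tf.placeholder(tf.float32, (batch, n_features))
    # Per-feature affine modulation followed by ReLU, the same computation as above
    return tf.nn.relu(tf.multiply(gamma, x) + beta)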
Example #11
    def predict(self, inputs_ph):

        piano_t, _, _, _, _ = inputs_ph

        #####################
        # Embedding piano
        with tf.name_scope("piano_embedding"):
            dense_layer = Dense(
                self.n_hs, activation='relu'
            )  # fully-connected layer with self.n_hs units and ReLU activation
            piano_embedding_ = dense_layer(piano_t)
            if self.dropout_probability > 0:
                piano_embedding = Dropout(
                    self.dropout_probability)(piano_embedding_)
            else:
                piano_embedding = piano_embedding_
            keras_layer_summary(dense_layer, collections=["weights"])

            # num_filter = 30
            # conv_layer = Conv1D(num_filter, 12, activation='relu')
            # piano_t_reshape = tf.reshape(piano_t, [-1, self.piano_dim, 1])
            # piano_embedding = conv_layer(piano_t_reshape)
            # piano_embedding = tf.reshape(piano_embedding, [-1, (self.piano_dim-12+1) * num_filter])
            # keras_layer_summary(conv_layer, collections=["weights"])
        #####################

        #####################
        # Concatenate and predict
        with tf.name_scope("top_layer_prediction"):
            embedding_concat = piano_embedding
            if self.dropout_probability > 0:
                top_input_drop = Dropout(
                    self.dropout_probability)(embedding_concat)
            else:
                top_input_drop = embedding_concat
            # dense_layer = Dense(self.orch_dim, activation='relu', name='orch_pred')
            dense_layer = Dense(self.orch_dim,
                                activation='sigmoid',
                                name='orch_pred')
            orch_prediction = dense_layer(top_input_drop)
            keras_layer_summary(dense_layer, collections=["weights"])
        #####################

        return orch_prediction, embedding_concat


# 'temporal_order' : 5,
# 'dropout_probability' : 0,
# 'weight_decay_coeff' : 0,
# 'n_hidden': [500, 500],
Example #12
	def predict_knowing_context(self, context_embedding_precomputed, orch_pred, mask):
		with tf.name_scope("predict_knowing_context"):
			# Build input
			x = tf.multiply(orch_pred - 0.5, mask)
			for dense in self.weights["MLP_NADE"]:
				x = dense(x)
				keras_layer_summary(dense, collections=["weights"])
				if self.dropout_probability > 0:
					x = Dropout(self.dropout_probability)(x)

			# concatenate
			x = tf.concat([context_embedding_precomputed, x], axis=1)
			dense = self.weights["last_MLP"]
			orch_prediction = dense(x)
			keras_layer_summary(dense, collections=["weights"])

		return orch_prediction, x
	def embed_context(self, inputs_ph):
		# MLP on concatenation of past orchestra and present piano
		with tf.name_scope("embed_context"):
			piano_t, _, _, orch_past, _ = inputs_ph

			orch_past_flat = tf.reshape(orch_past, [-1, (self.temporal_order-1) * self.orch_dim])
			x = tf.concat([piano_t, orch_past_flat], axis=1)

			for dense in self.weights["MLP_embed"]:
				x = dense(x)
				keras_layer_summary(dense, collections=["weights"])
				if self.dropout_probability > 0:
					x = Dropout(self.dropout_probability)(x)

			context_embedding = x
			if self.context_embedding_size is None:
				self.context_embedding_size = context_embedding.get_shape()[1]

		return context_embedding
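# How embed_context and predict_knowing_context fit together at generation time, as a
# hedged sketch rather than the project's actual sampling code: the context embedding
# is computed once per frame, then orchestra units are sampled one at a time in a
# random order (orderless-NADE style), with the mask marking units already revealed.
# The names `model`, `sess` and the placeholder attributes in the feed dicts are
# assumptions made for illustration only.
import numpy as np

def nade_sampling_sketch(sess, model, context_feed, orch_dim, rng=np.random):
    # Context embedding for the current frame (piano_t + flattened orch_past)
    context = sess.run(model.context_embedding, feed_dict=context_feed)
    orch = np.zeros((1, orch_dim), dtype=np.float32)  # sample built up unit by unit
    mask = np.zeros((1, orch_dim), dtype=np.float32)  # 1 = unit already sampled
    for d in rng.permutation(orch_dim):
        probs = sess.run(model.orch_prediction,
                         feed_dict={model.context_ph: context,
                                    model.orch_pred_ph: orch,
                                    model.mask_ph: mask})
        orch[0, d] = rng.binomial(1, probs[0, d])  # Bernoulli draw for this unit
        mask[0, d] = 1.0                           # reveal it for subsequent steps
    return orch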
Example #14
    def predict_knowing_context(self, context_embedding, orch_pred, mask):
        with tf.name_scope("predict_knowing_context"):
            # Build input
            with tf.name_scope("build_input"):
                masked_orch_t = tf.multiply(orch_pred, mask)
                x = tf.concat([context_embedding, masked_orch_t, mask], axis=1)

            # Propagate as in a normal network
            for i, dense in enumerate(self.weights["NADE_mlp"]):
                with tf.name_scope("layer_" + str(i)):
                    x = dense(x)
                    keras_layer_summary(dense, collections=["weights"])
                    x = Dropout(self.dropout_probability)(x)

            # Last layer NADE_mlp
            dense = self.weights["NADE_mlp_last"]
            orch_prediction = dense(x)
            keras_layer_summary(dense, collections=["weights"])

        return orch_prediction, x
Example #15
	def predict(self, inputs_ph):
		piano_t, _, _, orch_past, _ = inputs_ph
		
		# x = piano_t
		orch_past_flat = tf.reshape(orch_past, [-1, (self.temporal_order-1) * self.orch_dim])
		x = tf.concat([piano_t, orch_past_flat], axis=1)

		for i, l in enumerate(self.layers):
			with tf.name_scope("layer_" + str(i)):
				dense = Dense(l, activation='relu', kernel_regularizer=regularizers.l2(self.weight_decay_coeff))
				x = dense(x)
				keras_layer_summary(dense)
				x = Dropout(self.dropout_probability)(x)

		with tf.name_scope("predictive_layer"):
			dense = Dense(self.orch_dim, activation='sigmoid')
			orch_prediction = dense(x)
			keras_layer_summary(dense)

		return orch_prediction, None
Example #16
    def embed_context(self, inputs_ph):
        with tf.name_scope("embed_context"):
            piano_t, _, _, orch_past, _ = inputs_ph

            #####################
            # GRU for modelling past orchestra
            # First layer
            with tf.name_scope("orchestra_past_embedding"):
                x = orch_past
                for layer_ind, gru_layer in enumerate(
                        self.weights["orchestra_emb_GRUs"]):
                    with tf.name_scope("orch_rnn_" + str(layer_ind)):
                        x = gru_layer(x)
                        keras_layer_summary(gru_layer, collections=["weights"])
                lstm_out = x
            #####################

            #####################
            # gru out and piano(t)
            with tf.name_scope("piano_present_embedding"):
                dense_layer = self.weights["piano_emb_MLP"]
                piano_embedding_ = dense_layer(piano_t)
                piano_embedding = Dropout(
                    self.dropout_probability)(piano_embedding_)
                keras_layer_summary(dense_layer, collections=["weights"])
            #####################

            #####################
            # Merge embeddings
            with tf.name_scope("merge_embeddings"):
                context_embedding = keras.layers.concatenate(
                    [lstm_out, piano_embedding], axis=1)
            #####################

            #####################
            # Context embedding size
            if self.context_embedding_size is None:
                self.context_embedding_size = context_embedding.get_shape()[1]
            #####################

        return context_embedding
Example #17
    def predict(self, inputs_ph):

        piano_ph, _ = inputs_ph
        #####################
        # GRU for modelling past orchestra
        # First layer
        with tf.name_scope("orch_rnn_0"):
            gru_layer = GRU(self.n_hs[0], return_sequences=True, input_shape=(self.temporal_order, self.piano_dim), activation='relu', dropout=self.dropout_probability)
            x = gru_layer(piano_ph)
            keras_layer_summary(gru_layer, collections=["weights"])

        if len(self.n_hs) > 1:
            # Intermediate layers
            for layer_ind in range(1, len(self.n_hs)):
                with tf.name_scope("orch_rnn_" + str(layer_ind)):
                    gru_layer = GRU(self.n_hs[layer_ind], return_sequences=True, activation='relu', dropout=self.dropout_probability)
                    x = gru_layer(x)
                    keras_layer_summary(gru_layer, collections=["weights"])
        orch_pred = x

        return orch_pred, orch_pred
Example #18
    def predict_knowing_context(self, context_embedding_precomputed, orch_pred,
                                mask):
        with tf.name_scope("predict_knowing_context"):
            # Build input
            with tf.name_scope("build_input"):
                masked_orch_t = tf.multiply(orch_pred, mask)
                x = tf.concat(
                    [context_embedding_precomputed, masked_orch_t, mask],
                    axis=1)

            with tf.name_scope("NADE_prediction"):
                # Propagate as in a normal network
                for dense in self.weights["MLP"]:
                    x = dense(x)
                    keras_layer_summary(dense, collections=["weights"])
                    x = Dropout(self.dropout_probability)(x)

                dense = self.weights["last_MLP"]
                orch_prediction = dense(x)
                keras_layer_summary(dense, collections=["weights"])

        return orch_prediction, x
Example #19
	def predict(self, inputs_ph, orch_pred, mask):
		piano_t, _, _, orch_past, _ = inputs_ph
		
		# Build input
		with tf.name_scope("build_input"):
			orch_past_flat = tf.reshape(orch_past, [-1, (self.temporal_order-1) * self.orch_dim])
			masked_orch_t = tf.multiply(orch_pred, mask)
			x = tf.concat([piano_t, orch_past_flat, masked_orch_t, mask], axis=1)

		# Propagate as in a normal network
		for i, l in enumerate(self.layers):
			with tf.name_scope("layer_" + str(i)):
				dense = Dense(l, activation='relu')
				x = dense(x)
				keras_layer_summary(dense)
				x = Dropout(self.dropout_probability)(x)

		dense = Dense(self.orch_dim, activation='sigmoid')
		orch_prediction = dense(x)
		keras_layer_summary(dense)

		return orch_prediction, x
Example #20
def stacked_rnn(input_seq,
                layers,
                rnn_type='gru',
                weight_decay_coeff=0,
                dropout_probability=0,
                activation='relu'):
    def layer_rnn(layer, rnn_type, return_sequences):
        # '==' rather than 'is' for string comparison; unknown types now raise
        # instead of leaving this_layer unbound.
        if rnn_type == 'gru':
            this_layer = GRU(layer,
                             return_sequences=return_sequences,
                             activation=activation,
                             dropout=dropout_probability,
                             # weight_decay_coeff is accepted so that the calls above
                             # type-check; applying it as an L2 kernel regularizer
                             # (as the other snippets do with regularizers.l2) is an
                             # assumption about the original implementation
                             kernel_regularizer=regularizers.l2(weight_decay_coeff))
        else:
            raise ValueError("Unsupported rnn_type: " + str(rnn_type))
        return this_layer

    if len(layers) > 1:
        return_sequences = True
    else:
        return_sequences = False

    with tf.name_scope("0"):
        this_layer = layer_rnn(layers[0], rnn_type, return_sequences)
        x = this_layer(input_seq)
        keras_layer_summary(this_layer)

    if len(layers) > 1:
        # Intermediate layers
        for layer_ind in range(1, len(layers)):
            # Last layer ?
            if layer_ind == len(layers) - 1:
                return_sequences = False
            else:
                return_sequences = True
            with tf.name_scope(str(layer_ind)):
                this_layer = layer_rnn(layers[layer_ind], rnn_type,
                                       return_sequences)
                x = this_layer(x)
                keras_layer_summary(this_layer)

    return x
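# A minimal usage sketch for stacked_rnn, assuming a TF1 placeholder holding a
# (batch, time, features) sequence; the sizes are illustrative. Only the last GRU
# layer returns a single vector per sequence (return_sequences=False), as coded above.
def _stacked_rnn_usage_sketch(temporal_order=5, orch_dim=128):
    seq_ph = tf.placeholder(tf.float32, (None, temporal_order, orch_dim), name="orch_past")
    emb = stacked_rnn(seq_ph, [600, 600], rnn_type='gru',
                      dropout_probability=0.2, activation='relu')
    return emb  # shape (None, 600)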
Example #21
    def predict(self, inputs_ph):

        piano_t, _, _, orch_past, _ = inputs_ph

        gamma, beta = self.FiLM_generator(orch_past,
                                          self.weights['FiLM_generator'])

        x = piano_t
        layer = self.weights["MLP"]
        x = layer(x)
        keras_layer_summary(layer, collections=["weights"])

        # FiLM layer
        x = tf.multiply(gamma, x) + beta
        x = keras.activations.relu(x)

        last_layer = self.weights["last_layer"]
        orch_prediction = last_layer(x)
        keras_layer_summary(last_layer, collections=["weights"])

        return orch_prediction, orch_prediction
Example #22
    def predict(self, inputs_ph):

        piano_t, piano_past, piano_future, orch_past, _ = inputs_ph
        
        with tf.name_scope("piano_embedding_past"):
            piano_embedding_past = self.piano_embedding(piano_t, piano_past, reverse=False)

        with tf.name_scope("piano_embedding_future"):
            piano_embedding_future = self.piano_embedding(piano_t, piano_future, reverse=True)

        with tf.name_scope("orchestra_embedding"):
            orchestra_embedding = self.orchestra_embedding(orch_past)

        #####################
        # Concatenate and predict
        with tf.name_scope("top_layer_prediction_0"):
            top_input = keras.layers.concatenate([orchestra_embedding, piano_embedding_past, piano_embedding_future], axis=1)
            dense_layer = Dense(1000, activation='relu', name='orch_pred_0')
            top_0 = dense_layer(top_input)
            keras_layer_summary(dense_layer)
        with tf.name_scope("top_layer_prediction_1"):
            dense_layer = Dense(self.orch_dim, activation='sigmoid', name='orch_pred')
            orch_prediction = dense_layer(top_0)
            keras_layer_summary(dense_layer)
        #####################

        return orch_prediction, top_input


# 'temporal_order' : 5,
# 'dropout_probability' : 0,
# 'weight_decay_coeff' : 0,
# 'kernel_size_piano': 12,
# 'kernel_size_orch': 12,
# 'hs_piano': [500],
# 'hs_orch': [600],
# 'embeddings_size': 500,
Example #23
    def FiLM_generator(self, x, layers):
        for gru_layer in layers:
            x = gru_layer(x)
            keras_layer_summary(gru_layer, collections=["weights"])

        gamma_layer = self.weights["gamma"]
        beta_layer = self.weights["beta"]
        keras_layer_summary(gamma_layer, collections=["weights"])
        keras_layer_summary(beta_layer, collections=["weights"])

        gamma = gamma_layer(x)
        beta = beta_layer(x)

        return gamma, beta
    def predict(self, inputs_ph, pitch_mask):
        piano_t, _, _, orch_t, orch_past, orch_future = inputs_ph

        with tf.name_scope("orch_past"):
            x = orch_past
            for layer in self.weights["orch_past"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            orch_past_embedding = x

        with tf.name_scope("orch_future"):
            x = orch_future
            for layer in self.weights["orch_future"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            orch_future_embedding = x

        with tf.name_scope("present_piano"):
            x = piano_t
            for layer in self.weights["piano_present"]:
                x = layer(x)
                keras_layer_summary(layer, collections=["weights"])
            piano_t_embedding = x

        with tf.name_scope("present_orchestra"):
            x = orch_t
            for ind_layer, layer in enumerate(self.weights["orch_present"]):
                if ind_layer == 0:
                    # Remove pitch from orch and weight
                    W, b = layer
                    # Mask out the column corresponding to the predicted pitch
                    x = tf.matmul(x, tf.multiply(W, pitch_mask)) + b
                else:
                    x = layer(x)
                    keras_layer_summary(layer, collections=["weights"])
            orch_t_embedding = x
        return x, x
    def piano_embedding(self, piano_t, piano_past, piano_future):
        # Build input
        # Add a time axis to piano_t
        piano_t_time = tf.reshape(piano_t, [-1, 1, self.piano_dim])

        with tf.name_scope("present"):
            # Only the current piano frame (no time axis)
            x = piano_t
            for layer_ind, gru_layer in enumerate(
                    self.weights["present_piano"]):
                with tf.name_scope("l_" + str(layer_ind)):
                    x = gru_layer(x)
                    keras_layer_summary(gru_layer)
            piano_t_emb = x

        with tf.name_scope("past"):
            # Concatenate past and t
            x = tf.concat([piano_past, piano_t_time], 1)
            for layer_ind, gru_layer in enumerate(self.weights["past_piano"]):
                with tf.name_scope("l_" + str(layer_ind)):
                    x = gru_layer(x)
                    keras_layer_summary(gru_layer)
            piano_past_emb = x

        with tf.name_scope("future"):
            # Concatenate t and future
            input_seq_fut = tf.concat([piano_t_time, piano_future], axis=1)
            # Flip the matrix along the time axis so that the last time index is t
            x = tf.reverse(input_seq_fut, axis=[1])
            for layer_ind, gru_layer in enumerate(
                    self.weights["future_piano"]):
                with tf.name_scope("l_" + str(layer_ind)):
                    x = gru_layer(x)
                    keras_layer_summary(gru_layer)
            piano_future_emb = x

        return piano_t_emb, piano_past_emb, piano_future_emb
    def predict(self, inputs_ph):
        
        piano_t, _, _, orch_past, _ = inputs_ph
        
        #####################
        # Batch norm
        # if self.binarize_piano:
        #   piano_t = BatchNorm()
        #####################


        #####################
        # GRU for modelling past orchestra
        # First layer
        if len(self.n_hs) > 1:
            return_sequences = True
        else:
            return_sequences = False
        
        with tf.name_scope("orch_rnn_0"):
            gru_layer = GRU(self.n_hs[0], return_sequences=return_sequences, input_shape=(self.temporal_order, self.orch_dim),
                    activation='relu', dropout=self.dropout_probability)
            x = gru_layer(orch_past)
            keras_layer_summary(gru_layer)
        
        if len(self.n_hs) > 1:
            # Intermediate layers
            for layer_ind in range(1, len(self.n_hs)):
                # Last layer ?
                if layer_ind == len(self.n_hs)-1:
                    return_sequences = False
                else:
                    return_sequences = True
                with tf.name_scope("orch_rnn_" + str(layer_ind)):
                    gru_layer = GRU(self.n_hs[layer_ind], return_sequences=return_sequences,
                            activation='relu', dropout=self.dropout_probability)
                    x = gru_layer(x)
                    keras_layer_summary(gru_layer)

        lstm_out = x
        #####################
        
        #####################
        # gru out and piano(t)
        with tf.name_scope("piano_embedding"):
            # fully-connected layer with self.n_hs[-1] units and ReLU activation
            piano_t_drop = Dropout(self.dropout_probability)(piano_t)
            dense_layer = Dense(self.n_hs[-1], activation='relu')
            piano_embedding = dense_layer(piano_t_drop)
            keras_layer_summary(dense_layer)
        #####################

        #####################
        # Concatenate and predict
        with tf.name_scope("top_layer_prediction"):
            top_input = keras.layers.concatenate([lstm_out, piano_embedding], axis=1)
            top_input_drop = Dropout(self.dropout_probability)(top_input)
            # First, just the linear part
            dense_layer = Dense(self.orch_dim, activation='linear', name='orch_pred')
            biased_linear_pred = dense_layer(top_input_drop)
            keras_layer_summary(dense_layer)
            # Add the pre-computed static biases
            precomputed_biases = tf.constant(self.static_bias, dtype=tf.float32, name='precomputed_static_biases')
            fix_linear_pred = biased_linear_pred + precomputed_biases
            # Now pass through the sigmoid non-linearity
            orch_prediction = Activation('sigmoid')(fix_linear_pred)
        #####################
        
        embedding_concat = top_input
        return orch_prediction, embedding_concat
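# A hedged sketch of how self.static_bias could be precomputed; the snippet above only
# shows that the biases are added before the sigmoid, so interpreting them as per-pitch
# base rates from the training set is an assumption. With mean activation p per
# orchestra unit, the logit log(p / (1 - p)) makes sigmoid(bias) reproduce that rate
# when the learned residue is zero.
import numpy as np

def compute_static_bias(orch_train, eps=1e-4):
    # orch_train: (num_frames, orch_dim) binary piano-roll of the orchestra
    mean_activation = np.clip(orch_train.mean(axis=0), eps, 1.0 - eps)
    return np.log(mean_activation / (1.0 - mean_activation)).astype(np.float32)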
    def predict(self, inputs_ph):

        piano_t, _, _, orch_past, _ = inputs_ph

        #####################
        # Batch norm
        # if self.binarize_piano:
        #   piano_t = BatchNorm()
        #####################

        #####################
        # GRU for modelling past orchestra
        # First layer
        if len(self.n_hs) > 1:
            return_sequences = True
        else:
            return_sequences = False

        with tf.name_scope("orch_rnn_0"):
            gru_layer = GRU(self.n_hs[0],
                            return_sequences=return_sequences,
                            input_shape=(self.temporal_order, self.orch_dim),
                            activation='relu',
                            dropout=self.dropout_probability)
            x = gru_layer(orch_past)
            keras_layer_summary(gru_layer)

        if len(self.n_hs) > 1:
            # Intermediate layers
            for layer_ind in range(1, len(self.n_hs)):
                # Last layer ?
                if layer_ind == len(self.n_hs) - 1:
                    return_sequences = False
                else:
                    return_sequences = True
                with tf.name_scope("orch_rnn_" + str(layer_ind)):
                    gru_layer = GRU(self.n_hs[layer_ind],
                                    return_sequences=return_sequences,
                                    activation='relu',
                                    dropout=self.dropout_probability)
                    x = gru_layer(x)
                    keras_layer_summary(gru_layer)

        lstm_out = x
        #####################

        #####################
        # gru out and piano(t)
        with tf.name_scope("piano_embedding"):
            piano_t_ = Dropout(self.dropout_probability)(piano_t)
            dense_layer = Dense(
                self.n_hs[-1], activation='relu'
            )  # fully-connected layer with self.n_hs[-1] units and ReLU activation
            piano_embedding = dense_layer(piano_t_)
            keras_layer_summary(dense_layer)

        #####################

        #####################
        # Concatenate and predict
        with tf.name_scope("top_layer_prediction"):
            top_input = keras.layers.concatenate([lstm_out, piano_embedding],
                                                 axis=1)
            top_input_drop = Dropout(self.dropout_probability)(top_input)
            dense_layer = Dense(self.orch_dim,
                                activation='sigmoid',
                                name='orch_pred')
            orch_prediction = dense_layer(top_input_drop)
            keras_layer_summary(dense_layer)
        #####################

        embedding_concat = top_input
        return orch_prediction, embedding_concat
    def FiLM_generator(self, x, layers):
        for gru_layer in layers:
            x = gru_layer(x)
            keras_layer_summary(gru_layer, collections=["weights"])
        return x
Example #29
    def predict(self, inputs_ph):

        piano_t, _, _, orch_past, _ = inputs_ph

        #####################
        # GRU for modelling past orchestra
        # First layer
        if len(self.GRU_orch_emb) > 1:
            return_sequences = True
        else:
            return_sequences = False

        with tf.name_scope("orchestra_embedding"):
            gru_layer = GRU(self.GRU_orch_emb[0],
                            return_sequences=return_sequences,
                            input_shape=(self.temporal_order, self.orch_dim),
                            activation='relu',
                            dropout=self.dropout_probability)
            x = gru_layer(orch_past)
            keras_layer_summary(gru_layer, collections=["weights"])

            if len(self.GRU_orch_emb) > 1:
                # Intermediate layers
                for layer_ind in range(1, len(self.GRU_orch_emb)):
                    # Last layer ?
                    if layer_ind == len(self.GRU_orch_emb) - 1:
                        return_sequences = False
                    else:
                        return_sequences = True
                    with tf.name_scope("orch_rnn_" + str(layer_ind)):
                        gru_layer = GRU(self.GRU_orch_emb[layer_ind],
                                        return_sequences=return_sequences,
                                        activation='relu',
                                        dropout=self.dropout_probability)
                        x = gru_layer(x)
                        keras_layer_summary(gru_layer, collections=["weights"])

            lstm_out = x
        #####################

        #####################
        # gru out and piano(t)
        with tf.name_scope("piano_embedding"):
            x = piano_t
            for num_unit in self.MLP_piano_emb:
                dense_layer = Dense(
                    num_unit, activation='relu'
                )  # fully-connected layer with `num_unit` units and ReLU activation
                x = dense_layer(x)
                keras_layer_summary(dense_layer, collections=["weights"])
                x = Dropout(self.dropout_probability)(x)
            piano_embedding = x
        #####################

        #####################
        # Concatenate and predict
        with tf.name_scope("top_layer_prediction"):
            x = keras.layers.concatenate([lstm_out, piano_embedding], axis=1)
            for num_unit in self.last_MLP:
                dense_layer = Dense(num_unit, activation='relu')
                x = dense_layer(x)
                keras_layer_summary(dense_layer, collections=["weights"])
                x = Dropout(self.dropout_probability)(x)
            # Residual
            dense_layer = Dense(self.orch_dim, name='orch_pred')
            residue = dense_layer(x)
            keras_layer_summary(dense_layer, collections=["weights"])
            # Input
            orch_tm1 = orch_past[:, -1, :]
            # Sum
            # activations = (orch_tm1 - 0.5) + residue
            activations = orch_tm1 + residue
            # Activation
            orch_prediction = keras.activations.sigmoid(activations)
        #####################

        embedding_concat = x
        return orch_prediction, embedding_concat
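# Note on the snippet above: the last Dense output is used as a residue added to the
# previous orchestra frame orch_tm1 before the sigmoid, so a near-zero residue biases
# the prediction toward notes that were already sounding at t-1; the commented-out
# variant centers orch_tm1 around zero before adding it.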
Example #30
    def predict(self, piano_t, orch_past):

        #####################
        # Piano embedding
        with tf.name_scope("conv_piano"):
            conv_layer = Conv1D(self.num_filter_piano,
                                self.kernel_size_piano,
                                activation='relu')
            p0 = conv_layer(tf.reshape(piano_t, [-1, self.piano_dim, 1]))
            keras_layer_summary(conv_layer)
        with tf.name_scope("weighted_sum_piano"):
            W = tf.get_variable("W", shape=(self.num_filter_piano, ))
            p1 = tf.scalar_mul(1 / tf.reduce_sum(W),
                               tf.tensordot(p0, W, [[2], [0]]))
            variable_summary(W)
        piano_embedding = MLP(p1,
                              self.mlp_piano,
                              "mlp_piano",
                              activation='relu')
        #####################

        #####################
        # GRU for modelling past orchestra
        # First layer
        if len(self.gru_orch) > 1:
            return_sequences = True
        else:
            return_sequences = False

        with tf.name_scope("orch_rnn_0"):
            x = GRU(self.gru_orch[0],
                    return_sequences=return_sequences,
                    input_shape=(self.temporal_order, self.orch_dim),
                    activation='relu',
                    dropout=self.dropout_probability)(orch_past)

        if len(self.gru_orch) > 1:
            # Intermediate layers
            for layer_ind in range(1, len(self.gru_orch)):
                # Last layer ?
                if layer_ind == len(self.gru_orch) - 1:
                    return_sequences = False
                else:
                    return_sequences = True
                with tf.name_scope("orch_rnn_" + str(layer_ind)):
                    x = GRU(self.gru_orch[layer_ind],
                            return_sequences=return_sequences,
                            activation='relu',
                            dropout=self.dropout_probability)(x)

        orch_embedding = x
        #####################

        #####################
        # Prediction
        input_pred = tf.concat([piano_embedding, orch_embedding], axis=1)
        top_input = MLP(input_pred,
                        self.mlp_pred,
                        "mlp_pred",
                        activation='relu')
        # Dense layers on top
        with tf.name_scope("last_MLP"):
            orch_prediction = Dense(self.orch_dim,
                                    activation='sigmoid',
                                    name='orch_pred')(top_input)
        #####################

        return orch_prediction


# 'batch_size' : 200,
# 'temporal_order' : 5,
# 'dropout_probability' : 0,
# 'weight_decay_coeff' : 0,
# 'num_filter_piano': 20,
# 'kernel_size_piano': 12,
# 'mlp_piano': [500, 500],
# 'mlp_pred': [500, 500],
# 'gru_orch': [500, 500],