Example #1
from keras.models import Sequential
from keras.layers import GRU, Dense, Dropout
from keras.utils import to_categorical

# One-hot encode the integer class labels.
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
print('x_train shape:', x_train.shape)
print('y_train shape:', y_train.shape)
print('x_test shape:', x_test.shape)
print('y_test shape:', y_test.shape)

n_timesteps, n_features = x_train.shape[1], x_train.shape[2]
n_outputs = y_train.shape[1]
epochs = 2
batch_size = 40
n_hidden = 128
# Three stacked GRU layers, dropout, and a softmax classification head.
model = Sequential()
model.add(
    GRU(n_hidden, input_shape=(n_timesteps, n_features),
        return_sequences=True))
model.add(GRU(n_hidden, return_sequences=True))
model.add(GRU(n_hidden, return_sequences=False))
model.add(Dropout(0.5))
model.add(Dense(n_outputs, activation='softmax'))

model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

history = model.fit(
    x_train,
    y_train,
    batch_size=batch_size,
    validation_data=(x_test, y_test),
    epochs=epochs)
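The snippet assumes 3-D sequence arrays x_train/x_test of shape (samples, timesteps, features) and integer label vectors y_train/y_test are already in scope. A minimal stand-in with random data, shaped like the UCI HAR activity-recognition set (an assumption), can be defined ahead of the code above:

import numpy as np

# Hypothetical stand-in data: 128-timestep windows of 9 sensor channels
# and 6 activity classes (the UCI HAR layout is assumed here).
x_train = np.random.randn(800, 128, 9).astype('float32')
x_test = np.random.randn(200, 128, 9).astype('float32')
y_train = np.random.randint(0, 6, size=(800,))
y_test = np.random.randint(0, 6, size=(200,))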
Example #2
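The default arguments below reference module-level constants that this snippet does not define. A sketch of plausible placeholder values (names taken from the signature; the numbers are assumptions for a bar-structured melody model) makes the function callable as written:

# Assumed configuration; the real values live elsewhere in the source project.
SEQ_LEN = 64                     # timesteps per sequence
INPUT_DIM = 128                  # feature width, e.g. a one-hot pitch vocabulary
GRU_HIDDEN_SIZE = 256            # encoder GRU units
LINEAR_HIDDEN_SIZE = (512, 32)   # (encoder hidden width, latent dimension)
NUM_DIRECTIONS = 2               # the encoder GRU is wrapped in Bidirectional
BEAT = 16                        # timesteps per decoded section; must divide SEQ_LEN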
from keras import backend as K
from keras import objectives  # exposed as keras.losses in newer Keras versions
from keras.layers import (GRU, Activation, BatchNormalization, Bidirectional,
                          Concatenate, Dense, Input, Lambda, Reshape)
from keras.models import Model


def create_gru_vae(seq_len=SEQ_LEN,
                   input_dim=INPUT_DIM,
                   gru_hidden_size=GRU_HIDDEN_SIZE,
                   linear_hidden_size=LINEAR_HIDDEN_SIZE,
                   num_directions=NUM_DIRECTIONS,
                   epsilon_std=1.,
                   beat=BEAT):

    # encoder layers
    simple_gru = GRU(gru_hidden_size)
    gru = Bidirectional(simple_gru)
    # The encoder GRU returns only its final state, so its output width
    # is gru_hidden_size per direction (no seq_len factor).
    gru_out_dim = gru_hidden_size * num_directions
    gru_in_dim = seq_len * input_dim

    bn0 = BatchNormalization()
    linear0 = Dense(linear_hidden_size[0], input_dim=gru_out_dim)
    bn1 = BatchNormalization()

    #decoder layers

    linear0d = Dense(gru_in_dim)
    bn0d = BatchNormalization()
    bn1d = BatchNormalization()

    linear1d = Dense(input_dim)
    bn2d = BatchNormalization()

    grud = GRU(input_dim, return_sequences=True, return_state=True)

    # encoder graph

    first = Input(shape=(seq_len, input_dim))
    x = gru(first)
    x = bn0(x)
    x = linear0(x)
    x = Activation('tanh')(x)
    h = bn1(x)

    # VAE Z layer
    z_mean = Dense(linear_hidden_size[1])(h)
    z_log_sigma = Dense(linear_hidden_size[1])(h)

    def sampling(args):
        """Reparameterization trick: z = mu + sigma * epsilon."""
        z_mean, z_log_sigma = args
        epsilon = K.random_normal(shape=(linear_hidden_size[1], ),
                                  mean=0.,
                                  stddev=epsilon_std)
        # Exponentiate the log-sigma head to get a positive scale.
        return z_mean + K.exp(z_log_sigma) * epsilon

    z = Lambda(sampling,
               output_shape=(linear_hidden_size[1], ))([z_mean, z_log_sigma])
    # Separate latent input for the standalone generator model.
    z_ = Input(shape=(linear_hidden_size[1], ))

    def decoder(z):
        n_sections = seq_len // beat
        b = beat
        list_tensor = []
        x = linear0d(z)
        x = bn0d(x)
        x = Activation('tanh')(x)
        x = Reshape((seq_len, input_dim))(x)

        # Decode the sequence section by section, `beat` timesteps at a time.

        for i in range(n_sections):
            # Run the shared decoder GRU over the current section; hn is
            # its final hidden state (returned but not reused here).
            x, hn = grud(x)
            x = bn1d(x)
            # Keep only the first `beat` timesteps of the section.
            x = Lambda(lambda t: t[:, :b, :])(x)
            x = linear1d(x)
            x = bn2d(x)
            x = Activation('sigmoid')(x)
            list_tensor.append(x)

        melody = Concatenate(axis=1)(list_tensor)
        last = Activation('tanh')(melody)

        return last

    last = decoder(z)
    last_g = decoder(z_)
    vae = Model(first, last)
    encoder = Model(first, z_mean)

    generator = Model(z_, last_g)

    def vae_loss(x, x_decoded_mean):
        # Reconstruction error plus KL divergence from the latent code
        # to the unit-Gaussian prior.
        recon_loss = objectives.mse(x, x_decoded_mean)
        kl_loss = -0.5 * K.mean(1 + z_log_sigma - K.square(z_mean) -
                                K.exp(z_log_sigma))
        return recon_loss + kl_loss

    vae.compile(optimizer='rmsprop', loss=vae_loss)

    generator.compile(optimizer='rmsprop', loss=vae_loss)

    return vae, encoder, generator
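A minimal usage sketch under the placeholder constants above (the training data here is random stand-in values in [0, 1], roughly matching the decoder's output range):

import numpy as np

vae, encoder, generator = create_gru_vae()

# Train the VAE to reconstruct its own input sequences.
x = np.random.rand(256, SEQ_LEN, INPUT_DIM).astype('float32')
vae.fit(x, x, batch_size=32, epochs=2)

# Encode real sequences, or decode random latent codes into new ones.
codes = encoder.predict(x[:8])
samples = generator.predict(np.random.randn(8, LINEAR_HIDDEN_SIZE[1]))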