def getDecadesProbFromRNN(indices, min_length, timbres, pitches, years):
    """Restore the trained RNN checkpoint and return its decade probabilities.

    Parameters
    ----------
    indices : array-like of int
        Row indices selecting which songs to run through the network.
    min_length : int
        Minimum sequence length forwarded to ``songProcess.get_batch_data``.
    timbres, pitches : sequence
        Per-song timbre / pitch features consumed by the batch builder.
    years : array-like
        Release years; converted to decades for the batch labels.

    Returns
    -------
    ndarray
        ``soft_max`` of the network's 10-way decade logits, one row per index.

    Notes
    -----
    Relies on module-level graph objects (``output``, ``tf_x``, ``tf_y``,
    ``saver``) defined elsewhere in this file.
    """
    batch_X, batch_y = songProcess.get_batch_data(
        indices, min_length, timbres, pitches,
        songProcess.transfer_year_to_decade(years))
    # Restore the saved RNN weights and evaluate the output logits.
    # (The original preallocated `logits` with np.zeros and called
    # session.close() inside the `with` block — both redundant, removed.)
    with tf.Session() as session:
        saver.restore(session, "my_net/save_net_rnn.ckpt")
        logits = session.run(output, feed_dict={
            tf_x: batch_X,
            tf_y: songProcess.transfer_year_to_10d(batch_y),
        })
    # Convert raw logits to a probability distribution over the 10 decades.
    return soft_max(logits)
# ---- Exemplo n.º 2 (score: 0) ----
# NOTE(review): this fragment appears truncated — the final sess.run(accuracy, ...)
# call below is missing its closing braces/parenthesis in this excerpt; restore
# them from the original file before running.
# Training loop: each iteration samples a random mini-batch, augments it with
# the pre-trained RNN's decade predictions, and runs one optimizer step on the
# feed-forward network.
for i in range(iteration):

    # Sample `batch_size` random row indices (index_array is shuffled in place).
    np.random.shuffle(index_array)
    batch_indices = index_array[0:batch_size]
    # 10-d decade features from the pre-trained RNN for this batch.
    year_10 = RNN_Trained.get_batch_decades_from_RNN(batch_indices,
                                                     global_decades)
    batch_X_without_year, batch_y = songProcess.get_batch_non_time_data(
        batch_indices, train_non_time, train_year)
    # Concatenate non-temporal features with the RNN decade features.
    batch_X = np.hstack([batch_X_without_year, year_10])
    #sess.run(train_step, feed_dict={xs: batch_X, ys: songProcess.transfer_year_to_68d(batch_y)})
    # Scale features with the (presumably pre-fit) scaler — TODO confirm it was
    # fit on training data elsewhere in this file.
    batch_X = scaler.transform(batch_X)
    batch_y = songProcess.transfer_year_to_decade(batch_y)
    # One optimizer step; loss_ is the batch loss evaluated alongside the update.
    d_, loss_ = sess.run([train_step, loss], {
        xs: batch_X,
        ys: songProcess.transfer_year_to_10d(batch_y)
    })
    if i % 50 == 0:
        # Every 50 steps: checkpoint the model and record summaries/accuracy
        # on the current batch.
        saver.save(sess=sess,
                   save_path="nn_music_model/nn_music.model",
                   global_step=i)  # save the model
        #y_pred = sess.run(pred, feed_dict=feed_dict_train)
        rs = sess.run(merged, {
            xs: batch_X,
            ys: songProcess.transfer_year_to_10d(batch_y)
        })
        #writer.add_summary(summary=rs, global_step=i)  # write to TensorBoard

        accuracy_ = sess.run(accuracy, {
            xs: batch_X,
            ys: songProcess.transfer_year_to_10d(batch_y)
# ---- Exemplo n.º 3 (score: 0) ----
# Streaming accuracy over predicted vs. true decade classes.
# tf.metrics.accuracy returns (acc, update_op) and creates 2 local variables;
# indexing [1] keeps the update op, so each sess.run both updates the running
# counters and returns the refreshed accuracy.
accuracy = tf.metrics.accuracy(
    labels=tf.argmax(ys, axis=1), predictions=tf.argmax(prediction, axis=1),)[1]

# Local-variable init is required for the tf.metrics counters.
init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())

sess = tf.Session()
sess.run(init_op)

# Cross-validation set: non-temporal features, labels converted to decades.
cv_array = np.arange(0, len(cv_year))
cv_X_without_year, cv_y = songProcess.get_batch_non_time_data(cv_array, cv_non_time, cv_year)
cv_y = songProcess.transfer_year_to_decade(cv_y)


# Train on random mini-batches; report batch loss/accuracy every 50 steps.
for i in range(iteration):
    index_array = np.arange(0, np.size(train_year))
    np.random.shuffle(index_array)
    batch_indices = index_array[0:batch_size]
    batch_X_without_year, batch_y = songProcess.get_batch_non_time_data(batch_indices, train_non_time, train_year)
    batch_decade = songProcess.transfer_year_to_decade(batch_y)
    one_hot_decade = songProcess.transfer_year_to_10d(batch_decade)
    sess.run(train_step, feed_dict={xs: batch_X_without_year, ys: one_hot_decade})
    if i % 50 == 0:
        loss_ = sess.run(loss, {xs: batch_X_without_year, ys: one_hot_decade})
        accuracy_ = sess.run(accuracy, {xs: batch_X_without_year, ys: one_hot_decade})
        print(str(i)+'train loss: %.4f' % loss_, '| train accuracy: %.2f' % accuracy_)
# BUG FIX: the original rebound the graph tensor `prediction` to its evaluated
# ndarray here, shadowing the tensor referenced by the `accuracy` metric above
# (and by later code that treats `prediction` as a graph node). Evaluate into a
# separate name instead.
cv_pred = sess.run(prediction, feed_dict={xs: cv_X_without_year, ys: songProcess.transfer_year_to_10d(cv_y)})
predict_decades = songProcess.transfer_10d_to_year(cv_pred)
# Fraction of CV songs whose predicted decade matches the true decade.
acc = np.sum(predict_decades == cv_y) / np.size(cv_y)
print(acc)
#print('| cv accuracy: %.2f' % accuracy_cv)
'''#####################PLACE 3#######################'''
# Collect all registered summaries and log the graph for TensorBoard.
merged = tf.summary.merge_all()
writer = tf.summary.FileWriter("logsoflstm/", sess.graph)  # creates the log sub-folder
'''##################################################'''

sess.run(init_op)     # initialize var in graph
for step in range(5000):    # training iteration
    '''Get batch set'''
    # get batch ids
    index_array = np.arange(0, len(train_years))
    np.random.shuffle(index_array)
    batch_indices = index_array[0:batch_size]
    # get batch_x and batch_y
    batch_X, batch_y = songProcess.get_batch_data(batch_indices, min_length, train_timbres, train_pitches, songProcess.transfer_year_to_decade(train_years))
    # ####################PLACE 4#######################
    # BUG FIX: the PLACE-4 marker strings were column-0 statements inside this
    # loop body, which raised an IndentationError on the next line; they are
    # now comments at loop depth.
    _, loss_ = sess.run([train_op, merged], {tf_x: batch_X, tf_y: songProcess.transfer_year_to_10d(batch_y)}) # change into merged
    writer.add_summary(loss_, step) # add
    if step % 50 == 0:
        # Every 50 steps, re-evaluate the merged summaries on this batch and
        # write them to TensorBoard as well.
        accuracy_ = sess.run(merged, {tf_x: batch_X, tf_y:  songProcess.transfer_year_to_10d(batch_y)})# change into merged
        writer.add_summary(accuracy_, step) # add
        # ##################################################

        #print('train loss: %.4f' % loss_, '| train accuracy: %.2f' % accuracy_)
        #if accuracy_>0.8:
        #    save_path = saver.save(sess, "/Users/farewell/Desktop/Data_Project_ECL/save_net_rnn.ckpt")
        #    accuracy_cv = sess.run(accuracy, feed_dict={tf_x: cv_X, tf_y:  songProcess_back.transfer_year_to_68d(cv_y)})
        #    print('cv accuracy: %.2f' % accuracy_cv)
        #    break;
#accuracy_cv = sess.run(merged, feed_dict={tf_x: cv_X, tf_y:  songProcess.transfer_year_to_10d(cv_y)})
    #writer.add_summary(accuracy_cv, step)
#print (sess.run(accuracy_cv,feed_dict={tf_x: cv_X, tf_y:  songProcess.transfer_year_to_10d(cv_y)}))
#GradientDescentOptimizer(0.00000000005).minimize(loss)

# Streaming accuracy: tf.metrics.accuracy returns (acc, update_op) and creates
# 2 local variables; [1] keeps the update op so each run refreshes the counters.
accuracy = tf.metrics.accuracy(  # return (acc, update_op), and create 2 local variables
    labels=tf.argmax(ys, axis=1),
    predictions=tf.argmax(prediction, axis=1),
)[1]

# Local-variable init is required for the tf.metrics counters.
init_op = tf.group(tf.global_variables_initializer(),
                   tf.local_variables_initializer())

sess = tf.Session()
sess.run(init_op)
# Pre-compute the trained RNN's decade predictions for the train / cv / test
# splits and one-hot encode them (10-d) as extra input features downstream.
tmp_d = RNN_Trained.getDecadesFromRNN(np.arange(0, np.size(train_year)),
                                      min_length, train_timbres, train_pitches,
                                      train_year)
global_decades = songProcess.transfer_year_to_10d(tmp_d)
tmp_d2 = RNN_Trained.getDecadesFromRNN(np.arange(0,
                                                 np.size(cv_year)), min_length,
                                       cv_timbres, cv_pitches, cv_year)
global_cv_decades = songProcess.transfer_year_to_10d(tmp_d2)
tmp_d3 = RNN_Trained.getDecadesFromRNN(np.arange(0, np.size(test_year)),
                                       min_length, test_timbres, test_pitches,
                                       test_year)
global_test_decades = songProcess.transfer_year_to_10d(tmp_d3)

# Full training design matrix: non-temporal features plus RNN decade features,
# stacked column-wise.
index_array = np.arange(0, np.size(train_year))
train_X_without_year, train_y = songProcess.get_batch_non_time_data(
    index_array, train_non_time, train_year)
train_year10 = RNN_Trained.get_batch_decades_from_RNN(index_array,
                                                      global_decades)
train_X = np.hstack([train_X_without_year, train_year10])
'''RNN main controler'''
# RNN training driver: up to 5000 mini-batch steps, with an early stop and
# checkpoint once batch accuracy exceeds 0.985.
sess = tf.Session()
sess.run(init_op)  # initialize var in graph
for step in range(5000):  # training iteration
    '''Get batch set'''
    # get batch ids
    index_array = np.arange(0, len(train_years))
    np.random.shuffle(index_array)
    batch_indices = index_array[0:batch_size]
    # get batch_x and batch_y
    batch_X, batch_y = songProcess.get_batch_data(
        batch_indices, min_length, train_timbres, train_pitches,
        songProcess.transfer_year_to_decade(train_years))
    # One optimizer step; labels are one-hot (10-d) decade vectors.
    _, loss_ = sess.run([train_op, loss], {
        tf_x: batch_X,
        tf_y: songProcess.transfer_year_to_10d(batch_y)
    })
    if step % 50 == 0:  # testing
        # Batch accuracy (streaming metric update) every 50 steps.
        accuracy_ = sess.run(accuracy, {
            tf_x: batch_X,
            tf_y: songProcess.transfer_year_to_10d(batch_y)
        })
        print('train loss: %.4f' % loss_, '| train accuracy: %.2f' % accuracy_)
        if accuracy_ > 0.985:
            # Good enough on this batch: checkpoint the weights and stop early.
            save_path = saver.save(sess, "my_net/save_net_rnn.ckpt")
            #    accuracy_cv = sess.run(accuracy, feed_dict={tf_x: cv_X, tf_y:  songProcess_back.transfer_year_to_68d(cv_y)})
            #    print('cv accuracy: %.2f' % accuracy_cv)
            break
# NOTE(review): this fragment is truncated — the feed_dict below is missing its
# closing braces/parenthesis in this excerpt; restore from the original file.
accuracy_cv = sess.run(accuracy,
                       feed_dict={
                           tf_x: cv_X,