# Example no. 1
# Assemble the training design matrix: non-time song features joined with
# the trained RNN's decade predictions, then fit the shared feature scaler.
index_array = np.arange(np.size(train_year))
train_X_without_year, train_y = songProcess.get_batch_non_time_data(index_array, train_non_time, train_year)
train_year10 = RNN_Trained.get_batch_decades_from_RNN(index_array, global_decades)
# Append the decade columns to the raw features and scale everything to [-5, 5].
train_X = np.hstack((train_X_without_year, train_year10))
scaler = MinMaxScaler(feature_range=(-5, 5))
scaler.fit(train_X)

# Build the cross-validation matrix the same way, reusing the scaler that
# was fitted on the training data (never refit on CV).
cv_array = np.arange(len(cv_year))
cv_X_without_year, cv_y = songProcess.get_batch_non_time_data(cv_array, cv_non_time, cv_year)
cv_year10 = RNN_Trained.get_batch_decades_from_RNN(cv_array, global_cv_decades)
cv_X = scaler.transform(np.hstack((cv_X_without_year, cv_year10)))
cv_y = songProcess.transfer_year_to_decade(cv_y)

# Build the held-out test matrix, again transforming with the train-fitted
# scaler so all three splits share one feature scale.
test_array = np.arange(len(test_year))
test_X_without_year, test_y = songProcess.get_batch_non_time_data(test_array, test_non_time, test_year)
test_year10 = RNN_Trained.get_batch_decades_from_RNN(test_array, global_test_decades)
test_X = scaler.transform(np.hstack((test_X_without_year, test_year10)))
test_y = songProcess.transfer_year_to_decade(test_y)

# NOTE(review): this loop appears truncated by the extraction — the
# get_batch_decades_from_RNN(...) call below is cut off mid-argument list,
# so the intended body cannot be reconstructed safely from this chunk.
for i in range(iteration):

    # Draw a random mini-batch of indices from the training set.
    np.random.shuffle(index_array)
    batch_indices = index_array[0:batch_size]
    year_10 = RNN_Trained.get_batch_decades_from_RNN(batch_indices,
# Group the global and local variable initializers into one op so a single
# sess.run(init_op) initializes everything in the graph.
init_op = tf.group(tf.global_variables_initializer(),
                   tf.local_variables_initializer())
# Saver used below to checkpoint ("my_net/save_net_rnn.ckpt") and restore.
saver = tf.train.Saver()
# --- RNN main controller ---
# Train the network on random mini-batches; every 50 steps evaluate on the
# current batch, and once batch accuracy exceeds 0.985 checkpoint and stop.
# The context manager guarantees the session is closed (the original leaked
# it and opened a second session right after).
with tf.Session() as sess:
    sess.run(init_op)  # initialize all variables in the graph
    for step in range(5000):  # training iterations
        '''Get batch set'''
        # Random mini-batch of song indices.
        index_array = np.arange(0, len(train_years))
        np.random.shuffle(index_array)
        batch_indices = index_array[0:batch_size]
        # Batch features and labels; labels are decades derived from years.
        batch_X, batch_y = songProcess.get_batch_data(
            batch_indices, min_length, train_timbres, train_pitches,
            songProcess.transfer_year_to_decade(train_years))
        _, loss_ = sess.run([train_op, loss], {
            tf_x: batch_X,
            tf_y: songProcess.transfer_year_to_10d(batch_y)
        })
        if step % 50 == 0:  # periodic check on the SAME training batch
            accuracy_ = sess.run(accuracy, {
                tf_x: batch_X,
                tf_y: songProcess.transfer_year_to_10d(batch_y)
            })
            print('train loss: %.4f' % loss_, '| train accuracy: %.2f' % accuracy_)
            if accuracy_ > 0.985:
                # Good enough on this batch: checkpoint and stop early.
                save_path = saver.save(sess, "my_net/save_net_rnn.ckpt")
                break
sess = tf.Session()

'''#####################PLACE 3#######################'''
merged = tf.summary.merge_all()  # bundle every registered summary op
# FileWriter creates the "logsoflstm/" sub-folder in the working directory.
writer = tf.summary.FileWriter("logsoflstm/", sess.graph)
'''##################################################'''

sess.run(init_op)  # initialize all variables in the graph
for step in range(5000):  # training iterations
    '''Get batch set'''
    # Draw a fresh random mini-batch of training indices each step.
    index_array = np.arange(0, len(train_years))
    np.random.shuffle(index_array)
    batch_indices = index_array[0:batch_size]
    batch_X, batch_y = songProcess.get_batch_data(
        batch_indices, min_length, train_timbres, train_pitches,
        songProcess.transfer_year_to_decade(train_years))
    # PLACE 4: the loss/accuracy fetches were replaced with `merged`, so each
    # run returns a serialized summary to log to TensorBoard.  The original
    # '''PLACE 4''' marker strings sat at column 0 INSIDE this loop body,
    # which is an IndentationError — they are proper comments now.
    _, summary_ = sess.run([train_op, merged], {
        tf_x: batch_X,
        tf_y: songProcess.transfer_year_to_10d(batch_y)
    })
    writer.add_summary(summary_, step)
    if step % 50 == 0:
        # Extra evaluation summary on the same batch every 50 steps.
        eval_summary = sess.run(merged, {
            tf_x: batch_X,
            tf_y: songProcess.transfer_year_to_10d(batch_y)
        })
        writer.add_summary(eval_summary, step)
# NOTE(review): sess and writer are left open to match the original; add
# writer.close() and sess.close() if nothing downstream needs them — cannot
# confirm from this chunk.
def getDecadesProbFromRNN(indices, min_length, timbres, pitches, years):
    """Score songs with the saved RNN and return their decade probabilities.

    Args:
        indices: indices of the songs to score.
        min_length: minimum sequence length passed to the batch builder.
        timbres, pitches: per-song feature sequences.
        years: release years; converted to decades/one-hots for the feed dict.

    Returns:
        soft_max of the RNN `output` logits for the selected songs.
    """
    batch_X, batch_y = songProcess.get_batch_data(
        indices, min_length, timbres, pitches,
        songProcess.transfer_year_to_decade(years))
    '''Restore RNN Session'''
    # The `with` block closes the session automatically, so the original
    # explicit close() was redundant; the dead `m`/`logits = np.zeros(...)`
    # pre-allocation (immediately overwritten) is removed as well.
    with tf.Session() as session:
        saver.restore(session, "my_net/save_net_rnn.ckpt")
        logits = session.run(output, feed_dict={
            tf_x: batch_X,
            tf_y: songProcess.transfer_year_to_10d(batch_y)})
    return soft_max(logits)