# NOTE(review): this chunk was whitespace-collapsed onto a single line and was
# not valid Python; structure reconstructed.  `numiterations`, `m` and `pcs`
# are defined earlier in the file, outside this chunk -- confirm against the
# full file.
def handleEpochStart(epoch):
    """Progress callback passed to trainPiece: print percent complete.

    Flushed so a supervising process reading our stdout sees it immediately.
    """
    percentage = int(100 * float(epoch) / float(numiterations))
    print("PERCENT: {}".format(percentage))
    sys.stdout.flush()

print("Starting training...")
sys.stdout.flush()
multi_training.trainPiece(m, pcs, numiterations, handleEpochStart)
sys.stdout.flush()
print("Finished training...")
sys.stdout.flush()
# NOTE(review): chunk was whitespace-collapsed onto one line; reconstructed.
# The first two statements are the tail of a gen_adaptive-style function whose
# `def` is outside this chunk (all_outputs / keep_thoughts / all_thoughts /
# name are in scope there) -- confirm indentation against the full file.
noteStateMatrixToMidi(numpy.array(all_outputs), 'output/' + name)
if keep_thoughts:
    pickle.dump(all_thoughts, open('output/' + name + '.p', 'wb'))

def fetch_train_thoughts(m, pcs, batches, name="trainthoughts"):
    """Run `batches` training batches and pickle the
    (input, output, thoughts) triple from each to output/<name>.p."""
    all_thoughts = []
    for i in range(batches):
        ipt, opt = multi_training.getPieceBatch(pcs)
        thoughts = m.update_thought_fun(ipt, opt)
        all_thoughts.append((ipt, opt, thoughts))
    pickle.dump(all_thoughts, open('output/' + name + '.p', 'wb'))

def create_output_directory():
    """Create the output/ directory if it does not already exist."""
    outputdir = "output"
    # BUG FIX: was a bare `except:`, which swallows every exception
    # (including KeyboardInterrupt); os.stat raises OSError when the path
    # is missing, so catch exactly that.
    try:
        os.stat(outputdir)
    except OSError:
        os.mkdir(outputdir)

if __name__ == '__main__':
    pcs = multi_training.loadPieces("music")
    m = model.Model([300, 300], [100, 50], dropout=0.5)
    create_output_directory()
    multi_training.trainPiece(m, pcs, 10000)
    pickle.dump(m.learned_config, open("output/final_learned_config.p", "wb"))
import sys

import model
import multi_training
import main

# NOTE(review): chunk was whitespace-collapsed onto one line; reconstructed.
# BUG FIX: the script consumes SEVEN positional arguments
# (l1 l2 l3 l4 drp itr ofile), so len(sys.argv) must be at least 8.  The old
# guard (`< 7`) let a 6-argument call through and then crashed unpacking
# sys.argv[1:-3] into four names; the usage message also omitted ofile.
if len(sys.argv) < 8:
    print("Expected python run.py l1, l2, l3, l4, drp, itr, ofile.")
    print("Instead got", sys.argv)
    exit(0)

# First four args: layer sizes for the time-axis and note-axis models.
l1, l2, l3, l4 = [int(x) for x in sys.argv[1:-3]]
drp = float(sys.argv[-3])   # dropout probability
itr = int(sys.argv[-2])     # number of training iterations
ofile = sys.argv[-1]        # output file handed to trainPiece

print("Creating model...")
m = model.Model([l1, l2], [l3, l4], dropout=drp)
print("Model created")

pcs = multi_training.loadPieces("music")
error = multi_training.trainPiece(m, pcs, itr, ofile)

# Name the generated composition after the hyperparameters and final error.
main.gen_adaptive(m, pcs, 10,
                  name="{}_{}_{}_{}_{}_{}".format(l1, l2, l3, l4, drp, error))
# NOTE(review): this chunk is whitespace-collapsed onto a single line and is
# not valid Python as shown.  It appears to contain: the tail of a
# gen_adaptive-style generation loop (the dangling `else:` belongs to an `if`
# whose header is outside this chunk, so a safe reconstruction is not
# possible), a complete fetch_train_thoughts definition, and a Python-2
# __main__ block (print statements) that trains for 1000 iterations and
# pickles m.learned_config.  Left byte-identical; recover the original
# line structure from version control before editing.
cons = 1 cons -= 0.02 else: cons += (1 - cons)*0.3 all_outputs.append(resdata[-1]) if keep_thoughts: all_thoughts.append(resdata) noteStateMatrixToMidi(numpy.array(all_outputs),'output/'+name) if keep_thoughts: pickle.dump(all_thoughts, open('output/'+name+'.p','wb')) def fetch_train_thoughts(m,pcs,batches,name="trainthoughts"): all_thoughts = [] for i in range(batches): ipt, opt = multi_training.getPieceBatch(pcs) thoughts = m.update_thought_fun(ipt,opt) all_thoughts.append((ipt,opt,thoughts)) pickle.dump(all_thoughts, open('output/'+name+'.p','wb')) if __name__ == '__main__': pcs = multi_training.loadPieces("music") print "building model" m = model.Model([300,300],[100,50], dropout=0.5) print "start training" multi_training.trainPiece(m, pcs, 1000) pickle.dump( m.learned_config, open( "output/final_learned_config.p", "wb" ) )
all_thoughts.append((ipt,opt,thoughts)) pickle.dump(all_thoughts, open('output/'+name+'.p','wb')) if __name__ == '__main__': pcs = multi_training.loadPiecesFromFileList(xmlfiles) print "Constructing neural net model..." m = model.Model([300,300],[100,50], dropout=0.5) def handleEpochStart(epoch): percentage = int(100 * float(epoch)/float(numiterations)) print "PERCENT: {}".format(percentage) sys.stdout.flush() print "Starting training..." sys.stdout.flush() multi_training.trainPiece(m, pcs, numiterations, handleEpochStart) sys.stdout.flush() print "Finished training..." sys.stdout.flush() pickle.dump( m.learned_config, open( outfile, "wb" )) #for line in sys.stdin: # print line
print "" print "" command = raw_input("Enter command / variable number:") if command == "1": error_interval = int(raw_input("Desired error reporting interval: ")) if command == "2": sample_interval = int(raw_input("Desired sample creating interval: ")) if command == "3": epochs_per_training = int(raw_input("Desired epochs per training: ")) if command == "4": print "Training for {} epochs, reporting error every {} epochs and generating a sample every {} epochs".format(epochs_per_training, error_interval, sample_interval) multi_training.trainPiece(m, pcs, epochs_per_training, error_interval=error_interval, sample_interval=sample_interval) epochs_trained = epochs_trained + epochs_per_training pickle.dump( m.learned_config, open( "output/final_learned_config.p", "wb" ) ) if command == "5": composition_name = raw_input("Desired composition name: ") if command == "6": composition_length = int(raw_input("Desired composition length: ")) if command == "7": print "Creating composition '{}' of length {}".format(composition_name, str(composition_length)) gen_adaptive(m,pcs,composition_length,name=composition_name) if command == "8": os._exit(0)
ipt, opt = multi_training.getPieceBatch(pcs) thoughts = m.update_thought_fun(ipt, opt) all_thoughts.append((ipt, opt, thoughts)) pickle.dump(all_thoughts, open('output/' + name + '.p', 'wb')) if __name__ == '__main__': path = 'C_music' batches = 7000 batches_old = 0 pieces = multi_training.loadPieces(dirpath=path) m = model.Model([300, 300], [100, 50], dropout=0.5) m.learned_config = pickle.load(open("output/params11000.p", "rb")) gen_adaptive(m, pieces, 1, name="composition_{0}".format(batches + batches_old), rbm=True) exit() print 'Training {0}+{1} batches on {2}'.format(batches, batches_old, path) multi_training.trainPiece( m, pieces, [batches, batches_old]) #, notes_to_input = None) pickle.dump( m.learned_config, open("output/final_learned_config_{0}.p".format(batches + batches_old), "wb"))
# NOTE(review): chunk was whitespace-collapsed onto one line; reconstructed
# (Python 3).  The first statement is the tail of fetch_train_thoughts,
# whose `def` is outside this chunk.
pickle.dump(all_thoughts, open('output/' + name + '.p', 'wb'))

def get_last_epoch(model_directory):
    """Return the largest epoch number embedded in a saved-parameter
    filename inside `model_directory`, or 0 if there are none.

    Filenames are expected to look like e.g. "params11000.p"; all digits in
    a name are concatenated and parsed as the epoch number.
    """
    # BUG FIX: the original filter was `'.p' in file`, which also matches
    # e.g. "foo.py" or "x.pickle"; endswith pins the intended extension.
    param_files = [f for f in os.listdir(model_directory) if f.endswith('.p')]
    # Collect every digit in a filename, in order, as one string.
    get_number = lambda s: "".join(c for c in s if c.isdigit())
    # Skip digitless names (e.g. "final_learned_config.p") instead of
    # crashing on int("") as the original did.
    epochs = [int(d) for d in (get_number(f) for f in param_files) if d]
    epochs.append(0)  # so max() is defined for an empty directory
    return max(epochs)

if __name__ == '__main__':
    # Directory in which the parameters computed for the model are saved.
    music_type_dir = "Scale"
    save_output_dir = music_type_dir + "/output"
    os.makedirs(save_output_dir, exist_ok=True)

    # Build the model and resume training from the last saved epoch.
    pcs = multi_training.loadPieces("Scale2")
    start = get_last_epoch(save_output_dir)
    m = model.BiaxialRNNModel([300, 300], [100, 50])
    multi_training.trainPiece(m, pcs, 10000, "Scale", start)
# NOTE(review): chunk was whitespace-collapsed onto one line; reconstructed
# (Python 3).  The first two statements are the tail of fetch_train_thoughts,
# whose `def` is outside this chunk.  German comments translated to English;
# runtime strings left byte-identical.
all_thoughts.append((ipt, opt, thoughts))
pickle.dump(all_thoughts, open('output/' + name + '.p', 'wb'))

if __name__ == '__main__':
    pcs = multi_training.loadPieces("music/beeth/")

    print('nun lädt der eumel das model')
    # To use the model, you need to first create an instance of the Model class
    m = model.Model([300, 300], [100, 50], dropout=0.5)

    # Resume from previously learned parameters.
    m.learned_config = pickle.load(open("my_final_learned_config.p", "rb"))

    print('nun tranieren')
    multi_training.trainPiece(m, pcs, 5)

    # Generate a MIDI composition with the model after training.
    gen_adaptive(m, pcs, 5, name="composition9")

    # Save the model parameters.
    pickle.dump(m.learned_config, open("output/my_final_learned_config.p", "wb"))

    # If you want to save your model weights, you can do
    # pickle.dump( m.learned_config, open( "path_to_weight_file.p", "wb" ) )
    # and if you want to load them, you can do
    # m.learned_config = pickle.load(open( "path_to_weight_file.p", "rb" ) )
ipt, opt = multi_training.getPieceBatch(pcs) thoughts = m.update_thought_fun(ipt, opt) all_thoughts.append((ipt, opt, thoughts)) pickle.dump(all_thoughts, open('output/' + name + '.p', 'wb')) if __name__ == '__main__': pcs = multi_training.loadPiecesFromFileList(xmlfiles) print "Constructing neural net model..." m = model.Model([300, 300], [100, 50], dropout=0.5) def handleEpochStart(epoch): percentage = int(100 * float(epoch) / float(numiterations)) print "PERCENT: {}".format(percentage) sys.stdout.flush() print "Starting training..." sys.stdout.flush() multi_training.trainPiece(m, pcs, numiterations, handleEpochStart) sys.stdout.flush() print "Finished training..." sys.stdout.flush() pickle.dump(m.learned_config, open(outfile, "wb")) #for line in sys.stdin: # print line
# NOTE(review): this chunk is whitespace-collapsed onto a single line and is
# not valid Python as shown.  It appears to contain: the tail of a
# gen_adaptive-style generation loop (the dangling `else:` belongs to an `if`
# whose header is outside this chunk, so a safe reconstruction is not
# possible), a complete fetch_train_thoughts definition, and a __main__
# block that trains for 206 iterations (the 10000-iteration call is
# commented out) and pickles m.learned_config.  Left byte-identical; recover
# the original line structure from version control before editing.
cons -= 0.02 else: cons += (1 - cons) * 0.3 all_outputs.append(resdata[-1]) if keep_thoughts: all_thoughts.append(resdata) noteStateMatrixToMidi(numpy.array(all_outputs), 'output/' + name) if keep_thoughts: pickle.dump(all_thoughts, open('output/' + name + '.p', 'wb')) def fetch_train_thoughts(m, pcs, batches, name="trainthoughts"): all_thoughts = [] for i in range(batches): ipt, opt = multi_training.getPieceBatch(pcs) thoughts = m.update_thought_fun(ipt, opt) all_thoughts.append((ipt, opt, thoughts)) pickle.dump(all_thoughts, open('output/' + name + '.p', 'wb')) if __name__ == '__main__': pcs = multi_training.loadPieces("music") m = model.Model([300, 300], [100, 50], dropout=0.5) #multi_training.trainPiece(m, pcs, 10000) multi_training.trainPiece(m, pcs, 206) pickle.dump(m.learned_config, open("output/final_learned_config.p", "wb"))
# NOTE(review): chunk was whitespace-collapsed onto one line; reconstructed
# on the assumption that every statement belongs inside main() -- confirm
# against the full file.  Stray semicolon after convert() removed.
def main():
    """Convert the raw data, train a biaxial model on music/, then
    generate a 10-piece composition named "composition"."""
    convert()
    m = model.Model([300, 300], [100, 50], dropout=0.5)
    pcs = multi_training.loadPieces("music")
    multi_training.trainPiece(m, pcs, 10000)
    gen_adaptive(m, pcs, 10, name="composition")
if keep_thoughts: pickle.dump(all_thoughts, open("output/" + name + ".p", "wb")) def fetch_train_thoughts(m, pcs, batches, name="trainthoughts"): all_thoughts = [] for i in range(batches): ipt, opt = multi_training.getPieceBatch(pcs) thoughts = m.update_thought_fun(ipt, opt) all_thoughts.append((ipt, opt, thoughts)) pickle.dump(all_thoughts, open("output/" + name + ".p", "wb")) if __name__ == "__main__": path = "C_music" batches = 7000 batches_old = 0 pieces = multi_training.loadPieces(dirpath=path) m = model.Model([300, 300], [100, 50], dropout=0.5) m.learned_config = pickle.load(open("output/params11000.p", "rb")) gen_adaptive(m, pieces, 1, name="composition_{0}".format(batches + batches_old), rbm=True) exit() print "Training {0}+{1} batches on {2}".format(batches, batches_old, path) multi_training.trainPiece(m, pieces, [batches, batches_old]) # , notes_to_input = None) pickle.dump(m.learned_config, open("output/final_learned_config_{0}.p".format(batches + batches_old), "wb"))