Example #1
# optimizer = tf.train.AdagradOptimizer(lr)
optimizer = tf.train.AdamOptimizer(lr)
# optimizer = tf.train.RMSPropOptimizer(lr)
# optimizer = tf.train.AdadeltaOptimizer(lr)

train_step = optimizer.apply_gradients(zip(grads, tvars))


# pj is presumably an alias for os.path.join, imported elsewhere in the script
train_data = np.load(pj(os.environ["HOME"], "Music", "ml", "test_licks.data.npy"))
input_size = train_data.shape[0]
corpus = dispatch_array(train_data, bptt_steps, batch_size)

sfn = 9
sf_sigma = 0.1
# sf: Gaussian smoothing kernel of length sfn, peaked at the centre of a [0, 1] window
sf = np.exp(-np.square(0.5-np.linspace(0.0, 1.0, sfn))/sf_sigma)
data = gen_poisson(np.asarray(input_size*[1.0/seq_size]), seq_size, 1.0, sf, seed)
# note: this synthetic Poisson corpus replaces the corpus built from train_data above
corpus = dispatch_array(data.T, bptt_steps, batch_size)
# corpus = [[]]
# for seq_i in xrange(data.shape[0]):
#     corpus[-1].append(np.asarray([data[seq_i, :]]))
#     if len(corpus[-1]) % bptt_steps == 0:
#         corpus.append([])
# if len(corpus[-1]) == 0:
#     corpus = corpus[:-1]


weights, recc_weights, bias = [], [], []
outputs_info, states_info, winput_info = [], [], []
grads_info = []
with tf.device("/{}:0".format(device)):
    sess = tf.Session()
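
dispatch_array() is a project-local helper whose definition is not part of this excerpt. Judging only from how it is called here (a (time, features) array plus bptt_steps and batch_size), the NumPy-only sketch below shows the kind of truncated-BPTT batching it plausibly performs; the name dispatch_array_sketch, the chunk layout and the return shape are assumptions, not the author's actual implementation.

import numpy as np

def dispatch_array_sketch(data, bptt_steps, batch_size):
    """Hypothetical stand-in for dispatch_array(): split a (time, features)
    array into truncated-BPTT windows of `bptt_steps` steps, each step being
    a (batch_size, features) slice."""
    time_len, features = data.shape
    steps_per_lane = time_len // batch_size                  # time steps per batch lane
    usable = (steps_per_lane // bptt_steps) * bptt_steps     # drop the ragged tail
    lanes = data[:batch_size * steps_per_lane].reshape(batch_size, steps_per_lane, features)
    windows = []
    for start in range(0, usable, bptt_steps):
        windows.append([lanes[:, start + t, :] for t in range(bptt_steps)])
    return windows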
Example #2
# tf.nn.l2_loss(x) is sum(x**2)/2; the summed per-step losses are normalised
# by sequence length, batch size and the number of output units
loss = tf.add_n([ tf.nn.l2_loss(target - output) for output, target in zip(outputs, targets) ]) / seq_size / batch_size / net_size

lr = tf.Variable(0.0, trainable=False)

tvars = tf.trainable_variables()
grads_raw = tf.gradients(loss, tvars)
# rescale the whole gradient list so its global L2 norm is at most 5.0;
# the ignored second return value is the pre-clip global norm
grads, _ = tf.clip_by_global_norm(grads_raw, 5.0)

#optimizer = tf.train.GradientDescentOptimizer(lr)
optimizer = tf.train.AdagradOptimizer(lr)
train_step = optimizer.apply_gradients(zip(grads, tvars))

sfn = 9
sf_sigma = 0.1
sf = np.exp(-np.square(0.5-np.linspace(0.0, 1.0, sfn))/sf_sigma)
inputs_v = [ np.asarray([s]) for s in gen_poisson(np.asarray(input_size*[1.0/seq_size]), seq_size, 1.0, sf, seed) ]
targets_v = [ np.asarray([s]) for s in gen_poisson(np.asarray(net_size*[1.0/seq_size]), seq_size, 1.0, sf, seed) ]

init_state_v = np.zeros((batch_size, 2*net_size+le_size))



sess = tf.Session()
writer = tf.train.SummaryWriter("{}/tf".format(os.environ["HOME"]), sess.graph)
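# note: tf.train.SummaryWriter and tf.initialize_all_variables (used below) are
# the pre-1.0 TensorFlow names; later releases renamed them to
# tf.summary.FileWriter and tf.global_variables_initializer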

weights, recc_weights, bias = [], [], []
outputs_info, states_info, winput_info = [], [], []
grads_info = []
with tf.device("/cpu:0"):
    sess.run(tf.initialize_all_variables())
    for e in xrange(epochs):
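
The excerpt stops at the epoch loop. Below is a minimal sketch of how a loop like this is typically driven in this pre-1.0 TensorFlow style; the placeholder names (inputs, targets, state), the learning-rate schedule and the printed diagnostics are assumptions, not code from the original project.

base_lr, lr_decay = 0.1, 0.999                   # hypothetical schedule
for e in xrange(epochs):
    # lr is a non-trainable Variable, so the host loop can reassign it each epoch
    sess.run(tf.assign(lr, base_lr * lr_decay ** e))
    feed = {state: init_state_v}                 # `state` placeholder name is assumed
    feed.update(dict(zip(inputs, inputs_v)))     # one placeholder per time step (assumed names)
    feed.update(dict(zip(targets, targets_v)))
    loss_v, _ = sess.run([loss, train_step], feed_dict=feed)
    print("epoch {}, loss {:.6f}".format(e, loss_v))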
Example #3
lr = tf.Variable(0.0, trainable=False)

tvars = tf.trainable_variables()
grads_raw = tf.gradients(loss, tvars)
grads, _ = tf.clip_by_global_norm(grads_raw, 5.0)

#optimizer = tf.train.GradientDescentOptimizer(lr)
optimizer = tf.train.AdagradOptimizer(lr)
train_step = optimizer.apply_gradients(zip(grads, tvars))

sfn = 9
sf_sigma = 0.1
sf = np.exp(-np.square(0.5 - np.linspace(0.0, 1.0, sfn)) / sf_sigma)
inputs_v = [
    np.asarray([s])
    for s in gen_poisson(np.asarray(input_size *
                                    [1.0 / seq_size]), seq_size, 1.0, sf, seed)
]
targets_v = [
    np.asarray([s])
    for s in gen_poisson(np.asarray(net_size *
                                    [1.0 / seq_size]), seq_size, 1.0, sf, seed)
]

init_state_v = np.zeros((batch_size, 2 * net_size + le_size))

sess = tf.Session()
writer = tf.train.SummaryWriter("{}/tf".format(os.environ["HOME"]), sess.graph)

weights, recc_weights, bias = [], [], []
outputs_info, states_info, winput_info = [], [], []
grads_info = []
# optimizer = tf.train.AdagradOptimizer(lr)
optimizer = tf.train.AdamOptimizer(lr)
# optimizer = tf.train.RMSPropOptimizer(lr)
# optimizer = tf.train.AdadeltaOptimizer(lr)

train_step = optimizer.apply_gradients(zip(grads, tvars))

train_data = np.load(
    pj(os.environ["HOME"], "Music", "ml", "test_licks.data.npy"))
input_size = train_data.shape[0]
corpus = dispatch_array(train_data, bptt_steps, batch_size)

sfn = 9
sf_sigma = 0.1
sf = np.exp(-np.square(0.5 - np.linspace(0.0, 1.0, sfn)) / sf_sigma)
data = gen_poisson(np.asarray(input_size * [1.0 / seq_size]), seq_size, 1.0,
                   sf, seed)
corpus = dispatch_array(data.T, bptt_steps, batch_size)
# corpus = [[]]
# for seq_i in xrange(data.shape[0]):
#     corpus[-1].append(np.asarray([data[seq_i, :]]))
#     if len(corpus[-1]) % bptt_steps == 0:
#         corpus.append([])
# if len(corpus[-1]) == 0:
#     corpus = corpus[:-1]

weights, recc_weights, bias = [], [], []
outputs_info, states_info, winput_info = [], [], []
grads_info = []
with tf.device("/{}:0".format(device)):
    sess = tf.Session()
    writer = tf.train.SummaryWriter("{}/tf".format(os.environ["HOME"]),
                                    sess.graph)
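
gen_poisson() is likewise a project-local helper whose definition is not in the excerpt. Given how it is called (per-unit rates, a sequence length, a time scale, the Gaussian kernel sf and a seed), a plausible NumPy-only sketch is given below; the sampling scheme and the (units, time) return shape are assumptions.

import numpy as np

def gen_poisson_sketch(rates, seq_size, dt, kernel, seed):
    """Hypothetical stand-in for gen_poisson(): sample a spike train per unit
    at the given rates and smooth it with `kernel` (here the Gaussian bump sf)."""
    rng = np.random.RandomState(seed)
    # Bernoulli approximation of a Poisson process: one draw per unit and time step
    spikes = (rng.uniform(size=(len(rates), seq_size)) < rates[:, None] * dt).astype(np.float64)
    # convolve each unit's spike train with the smoothing kernel
    return np.vstack([np.convolve(row, kernel, mode="same") for row in spikes])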