Code example #1
File: upsample_test.py  Project: ml-lab/SymJAX

# Imports assumed for the snippet below (theanoxla was the early name of the SymJAX project).
import numpy as np
import theanoxla
import theanoxla.tensor as T

# SHAPE and z are defined earlier in the original file; the values here are only stand-ins.
SHAPE = (8, 8)
z = T.Variable(np.random.randn(*SHAPE).astype('float32'), name='z')

w = T.Placeholder(SHAPE, 'float32', name='w')                    # symbolic input
noise = T.random.uniform(SHAPE, dtype='float32')                 # random node (reseeded below via noise.seed)
y = T.cos(theanoxla.nn.activations.leaky_relu(z, 0.3) + w + noise)
cost = T.pool(y, (2, 2))                                         # 2x2 pooling
cost = T.sum(cost)                                               # reduce to a scalar objective

grads = theanoxla.gradients(cost, [w, z], [1])                   # gradients of cost w.r.t. w and z

print(cost.get({w: np.random.randn(*SHAPE)}))                    # evaluate the graph for a random w
noise.seed = 20                                                  # reseed the random node
print(cost.get({w: np.random.randn(*SHAPE)}))
noise.seed = 40
print(cost.get({w: np.random.randn(*SHAPE)}))

updates = {z: z - 0.01 * grads[1]}                               # gradient-descent step on z (grads[1] = dcost/dz)
fn1 = theanoxla.function(w, outputs=[cost])
fn2 = theanoxla.function(w, outputs=[cost], updates=updates)     # same, but also applies the update
print(fn1(np.random.randn(*SHAPE)))
print(fn1(np.random.randn(*SHAPE)))

cost = list()
for i in range(1000):
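    # (The loop body is cut off in the original listing; the lines below are only a plausible
    #  continuation, not the original test code: apply the update on z via fn2 and log the loss.)
    out = fn2(np.random.randn(*SHAPE))
    cost.append(out[0])
print(cost[0], cost[-1])                                         # the logged loss should tend to decrease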
Code example #2
# Setup assumed for the snippet below (same theanoxla-era API as the first example); BS, lr,
# and the generator/discriminator networks are defined elsewhere in the original file.
from sklearn import datasets
from theanoxla import losses, optimizers, gradients, function   # import paths assumed

BS = 100                                                         # batch size (stand-in value)
lr = 0.001                                                       # learning rate (stand-in value)

DATA, _ = datasets.make_moons(1000)                              # two-moons toy dataset

X = T.Placeholder([BS, 2], 'float32')                            # batch of real data points
Z = T.Placeholder([BS, 2], 'float32')                            # batch of latent noise

G_sample = generator(Z, 2)                                       # list of generator layer outputs
logits = discriminator(T.concatenate([G_sample[-1], X]))         # fake batch stacked on real batch
labels = T.concatenate([T.zeros(BS, dtype='int32'), T.ones(BS, dtype='int32')])  # 0 = fake, 1 = real

disc_loss = losses.sparse_crossentropy_logits(labels, logits[-1]).mean()    # discriminator loss
gen_loss = losses.sparse_crossentropy_logits(1 - labels[:BS],
                                             logits[-1][:BS]).mean()        # generator loss (flipped labels on the fake half)
masks = T.concatenate([G_sample[1] > 0, G_sample[3] > 0], 1)                # sign patterns of two generator hidden layers

A = T.stack([
    gradients(G_sample[-1][:, 0].sum(), [Z])[0],
    gradients(G_sample[-1][:, 1].sum(), [Z])[0]
], 1)                                                            # per-sample Jacobian of the generator output w.r.t. Z
det = T.abs(T.det(A))                                            # |det J|, the local volume change of the mapping

d_variables = sum([l.variables() for l in logits], [])           # all discriminator parameters
g_variables = sum([l.variables() for l in G_sample], [])         # all generator parameters

updates_d = optimizers.Adam(disc_loss, d_variables, lr)
updates_g = optimizers.Adam(gen_loss, g_variables, lr)
updates = {**updates_d, **updates_g}                             # merge both optimizers' update rules

f = function(Z, X, outputs=[disc_loss, gen_loss], updates=updates)   # one joint training step
g = function(Z, outputs=[G_sample[-1]])                              # sample from the generator

h = function(Z, outputs=[masks, det])                                # inspect activation masks and |det J|
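
A minimal sketch of how these compiled functions might be driven, assuming BS and lr as set above and a uniform latent prior for Z; this training loop is not part of the original example:

for epoch in range(300):
    np.random.shuffle(DATA)                                      # fresh minibatch order each pass
    for i in range(len(DATA) // BS):
        x_batch = DATA[i * BS:(i + 1) * BS].astype('float32')
        z_batch = np.random.rand(BS, 2).astype('float32')        # assumed latent prior
        d_loss, g_loss = f(z_batch, x_batch)                     # one joint discriminator/generator step
samples = g(np.random.rand(BS, 2).astype('float32'))[0]          # draw generated 2-D points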