Example n. 1
0
    # Wigner–Ville distribution of the batch-expanded signal (window 1024,
    # hop 32). NOTE(review): the matching `if` header is above this chunk —
    # it presumably branches on `L > 0`, mirroring the check further down;
    # confirm against the full file.
    WVD = T.signal.wvd(T.expand_dims(signal, 1), 1024, L=L, hop=32)
else:
    # Fallback time-frequency representation: mel-frequency spectral
    # coefficients. Positional arguments follow the project's T.signal.mfsc
    # signature (presumably window=1024, hop=192, 80 mel filters,
    # band limits at 44100/4 Hz — TODO confirm against the API).
    WVD = T.signal.mfsc(T.expand_dims(signal, 1), 1024, 192, 80, 2, 44100 / 4,
                        44100 / 4)

# Compile a CPU function mapping the raw signal placeholder to its
# time-frequency image.
tf_func = theanoxla.function(signal, outputs=[WVD], backend='cpu')

# Network inputs: the time-frequency image, integer class labels, and a
# flag that switches BatchNormalization between train and eval behaviour.
tf = T.Placeholder(WVD.shape, 'float32')
label = T.Placeholder((BS,), 'int32')
deterministic = T.Placeholder((1,), 'bool')

# ---- first layer ----
NN = 32
if L > 0:
    # Learnable 2-D Gaussian kernel evaluated on an NN x NN grid over
    # [-5, 5]^2.
    axis = T.linspace(-5, 5, NN)
    x, y = T.meshgrid(axis, axis)
    grid = T.stack([x.flatten(), y.flatten()], 1)
    cov = T.Variable(np.eye(2), name='cov')
    # Quadratic form grid^T (C^T C) grid: parameterising the precision as
    # C^T C keeps it positive semi-definite for any C.
    precision = cov.T().dot(cov)
    quad = (grid.dot(precision) * grid).sum(1)
    gaussian = T.exp(-quad).reshape((1, 1, NN, NN))
    layer = [layers.Conv2D(tf, 1, (NN, NN), strides=(6, 6), W=gaussian,
                           b=None)]
    # Register the covariance factor so the optimiser updates it as well.
    layer[-1].add_variable(cov)
    # Compressive log-magnitude nonlinearity (0.01 avoids log(0)).
    layer.append(
        layers.Activation(layer[-1], lambda u: T.log(T.abs(u) + 0.01)))
else:
    layer = [layers.Activation(tf + 0.01, T.log)]

layer.append(layers.Conv2D(layer[-1], 16, (3, 3)))
layer.append(layers.BatchNormalization(layer[-1], [0, 2, 3], deterministic))
layer.append(layers.Activation(layer[-1], T.leaky_relu))
Example n. 2
0
# Two-moons toy dataset for the GAN demo.
DATA, _ = datasets.make_moons(1000)

# Latent noise (Z) and real-data (X) placeholders, both of batch size BS.
X = T.Placeholder([BS, 2], 'float32')
Z = T.Placeholder([BS, 2], 'float32')

# Generator layer stack and discriminator logits over [fake; real] samples;
# label 0 marks generated samples, label 1 marks real ones.
G_sample = generator(Z, 2)
logits = discriminator(T.concatenate([G_sample[-1], X]))
labels = T.concatenate([T.zeros(BS, dtype='int32'), T.ones(BS, dtype='int32')])

# Discriminator loss: classify the whole batch with its true labels.
disc_loss = losses.sparse_crossentropy_logits(labels, logits[-1]).mean()
# Generator loss: push the fake half of the batch toward label 1.
gen_loss = losses.sparse_crossentropy_logits(1 - labels[:BS],
                                             logits[-1][:BS]).mean()
# Sign patterns of two intermediate generator layers.
masks = T.concatenate([G_sample[1] > 0, G_sample[3] > 0], 1)

# Jacobian of the generator output w.r.t. Z (one gradient per output
# coordinate) and its absolute determinant — the local volume change of
# the generator map.
jac_rows = [gradients(G_sample[-1][:, 0].sum(), [Z])[0],
            gradients(G_sample[-1][:, 1].sum(), [Z])[0]]
A = T.stack(jac_rows, 1)
det = T.abs(T.det(A))

# Flatten each network's per-layer variable lists into one list.
d_variables = sum((l.variables() for l in logits), [])
g_variables = sum((l.variables() for l in G_sample), [])

updates_d = optimizers.Adam(disc_loss, d_variables, lr)
updates_g = optimizers.Adam(gen_loss, g_variables, lr)
updates = dict(updates_d)
updates.update(updates_g)

# Compiled entry points: joint training step, sampling, and a probe that
# returns the activation masks together with the Jacobian determinant.
f = function(Z, X, outputs=[disc_loss, gen_loss], updates=updates)
g = function(Z, outputs=[G_sample[-1]])

h = function(Z, outputs=[masks, det])