# ---- コード例 #1 (Code example #1) ----
BATCH_SIZE = 20
H_SIZE = 128

# Input-to-hidden transforms, one per LSTM-style gate.  Constructed in this
# exact order so the shared RNG is consumed identically on every run.
encoder, encode_igate, encode_fgate = (
    HiddenLayer(rng, (128, H_SIZE)) for _ in range(3)
)

# Hidden-to-hidden (recurrent) transforms, one per gate.
recoder, recode_igate, recode_fgate = (
    HiddenLayer(rng, (H_SIZE, H_SIZE)) for _ in range(3)
)

activation = ActivationLayer(rng, f='elu')
dropout = DropoutLayer(rng, 0.2)

# Two conv/pool/activation stages; layers are built one by one (in the same
# order as before) and then assembled into the encoder network.
conv_1 = Conv1DLayer(rng, (64, 63, 25), (BATCH_SIZE, 63, 240))
pool_1 = Pool1DLayer(rng, (2, ), (BATCH_SIZE, 64, 240))
act_1 = ActivationLayer(rng, f='elu')
conv_2 = Conv1DLayer(rng, (256, 64, 25), (BATCH_SIZE, 64, 120))
pool_2 = Pool1DLayer(rng, (2, ), (BATCH_SIZE, 256, 120))
act_2 = ActivationLayer(rng, f='elu')

encoder_network = Network(conv_1, pool_1, act_1, conv_2, pool_2, act_2)

# Pretrained weights for the two conv layers; the pool/activation slots carry
# no parameters, hence None.
encoder_network.load([
    '../models/vae_lstm/3_vae_lstm_layer_0.npz',
    None,
    None,
    '../models/vae_lstm/3_vae_lstm_layer_1.npz',
    None,
    None,
])
# ---- コード例 #2 (Code example #2) ----
from nn.ActivationLayer import ActivationLayer
from nn.DropoutLayer import DropoutLayer
from nn.Pool1DLayer import Pool1DLayer
from nn.Conv1DLayer import Conv1DLayer
from nn.VariationalLayer import VariationalLayer
from nn.Network import Network, AutoEncodingNetwork, InverseNetwork

# Fixed seed so weight initialisation is reproducible across runs.
RNG_SEED = 23455
rng = np.random.RandomState(RNG_SEED)

BATCH_SIZE = 1
network = Network(
    
    Network(
        DropoutLayer(rng, 0.25),    
        Conv1DLayer(rng, (64, 66, 25), (BATCH_SIZE, 66, 240)),
        Pool1DLayer(rng, (2,), (BATCH_SIZE, 64, 240)),
        ActivationLayer(rng),

        DropoutLayer(rng, 0.25),    
        Conv1DLayer(rng, (128, 64, 25), (BATCH_SIZE, 64, 120)),
        Pool1DLayer(rng, (2,), (BATCH_SIZE, 128, 120)),
        ActivationLayer(rng),
    ),
    
    Network(
        VariationalLayer(rng),
    ),
    
    Network(
        InverseNetwork(Pool1DLayer(rng, (2,), (BATCH_SIZE, 64, 120))),
# ---- コード例 #3 (Code example #3) ----
# Boundary between the train+validation portion and the held-out test set:
# the last 20% of samples are reserved for testing.  `cv_split` (the
# train/validation boundary) is assumed to be defined earlier in the file.
test_split = int(X.shape[0] * 0.8)

X, Y = map(np.array, zip(*shuffled))


def _as_shared(values):
    # Hand an already-copied array to Theano without a further copy.
    return theano.shared(values, borrow=True)


# NOTE: np.array(...) deliberately makes a fresh copy before each slice, so
# borrow=True can never alias the shared variable onto X / Y themselves.
X_train = _as_shared(np.array(X)[:cv_split])
Y_train = _as_shared(np.array(Y)[:cv_split])

X_valid = _as_shared(np.array(X)[cv_split:test_split])
Y_valid = _as_shared(np.array(Y)[cv_split:test_split])

X_test = _as_shared(np.array(X)[test_split:])
Y_test = _as_shared(np.array(Y)[test_split:])

batchsize = 10

network = Network(
    Conv1DLayer(rng, (64, 66, 25), (batchsize, 66, 240)),
    # For stable computation using batchnorm layer,
    # please ensure to normalize the features of the data
    # (3rd axis for style transfer data)
    BatchNormLayer(rng, (batchsize, 64, 240), axes=(
        0,
        2,
    )),
    ActivationLayer(rng, f='ReLU'),
    Pool1DLayer(rng, (2, ), (batchsize, 64, 240)),
    Conv1DLayer(rng, (128, 64, 25), (batchsize, 64, 120)),
    BatchNormLayer(rng, (batchsize, 128, 120), axes=(
        0,
        2,
    )),
    ActivationLayer(rng, f='ReLU'),