Example #1
Note: throughout these examples, Bi and TD are the usual Keras aliases (from keras.layers.wrappers import Bidirectional as Bi, TimeDistributed as TD — see the imports in Example #8).
    def make_net(self):
        input_dim = self.word_feat_len
        output_dim = self.word_feat_len

        encoder_inputs = Input(shape=(None, input_dim))
        encoder_dense_outputs = Dense(
            input_dim, activation='sigmoid')(encoder_inputs)
        encoder_bi_lstm = LSTM(
            self.latent_dim, return_sequences=True, dropout=0.6, recurrent_dropout=0.6)
        encoder_bi_outputs = Bi(encoder_bi_lstm)(encoder_dense_outputs)
        _, state_h, state_c = LSTM(self.latent_dim, return_state=True,
                                   dropout=0.2, recurrent_dropout=0.2)(encoder_bi_outputs)
        encoder_states = [state_h, state_c]

        decoder_inputs = Input(shape=(None, input_dim))
        decoder_dense_outputs = Dense(
            input_dim, activation='sigmoid')(decoder_inputs)
        decoder_bi_lstm = LSTM(
            self.latent_dim, return_sequences=True, dropout=0.6, recurrent_dropout=0.6)
        decoder_bi_outputs = Bi(decoder_bi_lstm)(decoder_dense_outputs)
        decoder_lstm = LSTM(self.latent_dim, return_sequences=True,
                            return_state=True, dropout=0.2, recurrent_dropout=0.2)
        decoder_outputs, _, _ = decoder_lstm(
            decoder_bi_outputs, initial_state=encoder_states)
        decoder_outputs = Dense(output_dim, activation='relu')(decoder_outputs)
        decoder_outputs = Dense(
            output_dim, activation='linear')(decoder_outputs)

        self.sequence_autoencoder = Model(
            [encoder_inputs, decoder_inputs], decoder_outputs)
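make_net wires a teacher-forced sequence autoencoder: the decoder consumes the target sequence as its own input while its LSTM is initialized with the encoder's final states. A minimal training sketch (not from the source; `net` is assumed to be an instance of the surrounding class with word_feat_len = 64, and all shapes are illustrative):

import numpy as np

net.make_net()
net.sequence_autoencoder.compile(optimizer='adam', loss='mse')

# Teacher forcing: the decoder input is the target shifted one step right.
enc_x = np.random.rand(32, 20, 64)
dec_x = np.zeros_like(enc_x)
dec_x[:, 1:, :] = enc_x[:, :-1, :]
net.sequence_autoencoder.fit([enc_x, dec_x], enc_x, batch_size=8, epochs=1)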
Example #2
    def build_decoder(self):
        K.set_learning_phase(1)
        decoder_input = Input(shape=(None, NNVAL.input_dim))
        state_h = Input(shape=(NNVAL.encoder_latent_dim, ))
        state_c = Input(shape=(NNVAL.encoder_latent_dim, ))

        decoder_dense_outputs = Dense(NNVAL.decoder_latent_dim,
                                      activation='sigmoid')(decoder_input)
        decoder_bi_lstm = LSTM(NNVAL.decoder_latent_dim,
                               return_sequences=True,
                               dropout=0.6,
                               recurrent_dropout=0.6)
        decoder_bi_outputs = Bi(decoder_bi_lstm)(decoder_dense_outputs)
        decoder_lstm = LSTM(NNVAL.decoder_latent_dim,
                            return_sequences=True,
                            return_state=True,
                            dropout=0.4,
                            recurrent_dropout=0.4)

        encoder_states = [state_h, state_c]
        decoder_output, output_h, output_c = decoder_lstm(
            decoder_bi_outputs, initial_state=encoder_states)

        decoder_output = Dense(NNVAL.decoder_latent_dim,
                               activation='tanh')(decoder_output)
        decoder_output = Dropout(0.2)(decoder_output)
        decoder_output = Dense(NNVAL.output_dim,
                               activation='linear')(decoder_output)

        return Container([decoder_input, state_h, state_c],
                         [decoder_output, output_h, output_c])
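Because this decoder Container also returns the updated LSTM states, it can be driven one step at a time at inference. A minimal sketch, assuming the matching build_encoder from Example #4, that NNVAL.encoder_latent_dim equals NNVAL.decoder_latent_dim, that NNVAL.output_dim equals NNVAL.input_dim (so predictions can be fed back in), and illustrative names input_seq / max_len:

import numpy as np
from keras.models import Model

enc = self.build_encoder()                 # Example #4: sequence -> [h, c]
dec = self.build_decoder()                 # this example
encoder = Model(enc.inputs, enc.outputs)   # wrap the Containers to get predict()
decoder = Model(dec.inputs, dec.outputs)

h, c = encoder.predict(input_seq)          # input_seq: (1, T, NNVAL.input_dim)
step = np.zeros((1, 1, NNVAL.input_dim))   # illustrative start-of-sequence frame
decoded = []
for _ in range(max_len):                   # max_len: assumed decoding horizon
    out, h, c = decoder.predict([step, h, c])
    decoded.append(out[0, -1])
    step = out[:, -1:, :]                  # feed the prediction back in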
Example #3
    def build_decoder(self, model=None):
        K.set_learning_phase(1)  # set learning phase

        encoder_h = Input(shape=(self.latent_dim, ))
        encoder_c = Input(shape=(self.latent_dim, ))
        encoder_states = [encoder_h, encoder_c]

        decoder_inputs = Input(shape=(None, self.input_dim))
        decoder_dense_outputs = Dense(self.input_dim,
                                      activation='sigmoid')(decoder_inputs)
        decoder_bi_lstm = LSTM(self.latent_dim,
                               return_sequences=True,
                               dropout=0.6,
                               recurrent_dropout=0.6)
        decoder_bi_outputs = Bi(decoder_bi_lstm)(decoder_dense_outputs)
        decoder_lstm = LSTM(self.latent_dim,
                            return_sequences=True,
                            return_state=True)
        decoder_outputs, _, _ = decoder_lstm(decoder_bi_outputs,
                                             initial_state=encoder_states)
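        # Note: the updated states are discarded, so this decoder maps whole
        # (teacher-forced) sequences in one call; unlike Example #2 it cannot
        # carry state across single-step calls.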
        decoder_outputs = Dense(self.output_dim,
                                activation='relu')(decoder_outputs)
        decoder_outputs = Dense(self.output_dim,
                                activation='linear')(decoder_outputs)

        return Model([decoder_inputs, encoder_h, encoder_c], decoder_outputs)
Example #4
    def build_encoder(self):
        K.set_learning_phase(1)
        encoder_input = Input(shape=(None, NNVAL.input_dim))
        encoder_dense_outputs = Dense(NNVAL.encoder_latent_dim,
                                      activation='sigmoid')(encoder_input)
        encoder_bi_lstm = LSTM(NNVAL.encoder_latent_dim,
                               return_sequences=True,
                               dropout=0.4,
                               recurrent_dropout=0.4)
        encoder_bi_outputs = Bi(encoder_bi_lstm)(encoder_dense_outputs)
        _, state_h, state_c = LSTM(NNVAL.encoder_latent_dim,
                                   return_state=True,
                                   dropout=0.2,
                                   recurrent_dropout=0.2)(encoder_bi_outputs)
        return Container(encoder_input, [state_h, state_c])
Example #5
    def build_encoder(self, model=None):
        K.set_learning_phase(1)  # set learning phase

        encoder_inputs = Input(shape=(None, self.input_dim))
        encoder_dense_outputs = Dense(self.input_dim,
                                      activation='sigmoid')(encoder_inputs)
        encoder_bi_lstm = LSTM(self.latent_dim,
                               return_sequences=True,
                               dropout=0.6,
                               recurrent_dropout=0.6)
        encoder_bi_outputs = Bi(encoder_bi_lstm)(encoder_dense_outputs)
        _, state_h, state_c = LSTM(self.latent_dim,
                                   return_state=True,
                                   dropout=0.2,
                                   recurrent_dropout=0.2)(encoder_bi_outputs)

        return Model(encoder_inputs, [state_h, state_c])
Example #6
import keras.backend as K
import numpy as np
import random
import sys
import pickle
import json
import glob
import copy
import os
import re
import time
import concurrent.futures
import threading

from keras.models import Model
from keras.layers import Input, Dense, GRU, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD

inputs      = Input( shape=(100,256) ) 
encoded     = Bi( GRU(300, activation='relu', return_sequences=False, dropout=0.1, recurrent_dropout=0.1) )( inputs )
encoded     = Dense(2012, activation='relu')( encoded )
encoded     = Dense(2012, activation='relu')( encoded )
encoded     = Dense(1012, activation='tanh')( encoded )
encoder     = Model(inputs, encoded)

decoded_1   = Bi( GRU(300, activation='relu', dropout=0.1, recurrent_dropout=0.1, return_sequences=True) )( RepeatVector(100)( encoded ) )
decoded_1   = TD( Dense(2024, activation='relu') )( decoded_1 )
decoded_1   = TD( Dense(2024, activation='relu') )( decoded_1 )
decoded_1   = TD( Dense(256, activation='linear') )( decoded_1 )

decoded_2   = Bi( GRU(300, activation='relu', dropout=0.1, recurrent_dropout=0.1, return_sequences=True) )( RepeatVector(100)( encoded ) )
decoded_2   = TD( Dense(2024, activation='relu') )( decoded_2 )
decoded_2   = TD( Dense(2024, activation='relu') )( decoded_2 )
decoded_2   = TD( Dense(256, activation='linear') )( decoded_2 )
Example #7
from keras.applications.vgg16 import VGG16
from keras.layers import Input, Dense, Dropout, Flatten, Concatenate, RepeatVector, LSTM
from keras.layers.wrappers import Bidirectional as Bi
from keras.models import Model
from keras.optimizers import Adam
input_tensor1 = Input(shape=(150, 150, 3))
vgg_model = VGG16(include_top=False,
                  weights='imagenet',
                  input_tensor=input_tensor1)
vgg_x = vgg_model.layers[-1].output
vgg_x = Flatten()(vgg_x)
vgg_x = Dense(1800, activation='relu')(vgg_x)
vgg_x = RepeatVector(20)(vgg_x)

input_tensor2 = Input(shape=(20, 1800))

encoded = Concatenate(axis=1)([vgg_x, input_tensor2])
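# Concatenate along the time axis: (batch, 20, 1800) repeated VGG features
# followed by the (batch, 20, 1800) sequence input -> (batch, 40, 1800).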

x = Bi(
    LSTM(500,
         recurrent_dropout=0.05,
         recurrent_activation='tanh',
         return_sequences=False))(encoded)
x = Dropout(0.10)(x)
x = Dense(2600, activation='relu')(x)
x = Dropout(0.10)(x)
x = Dense(2600, activation='relu')(x)
x = Dropout(0.10)(x)
decoded = Dense(1800, activation='softmax')(x)

model = Model([input_tensor1, input_tensor2], decoded)
model.compile(optimizer=Adam(), loss='categorical_crossentropy')
"""
0 <keras.engine.topology.InputLayer object at 0x7f9ecfcea4a8>
1 <keras.layers.convolutional.Conv2D object at 0x7f9ece6220f0>
2 <keras.layers.convolutional.Conv2D object at 0x7f9e8deb02e8>
from keras.layers.noise import GaussianNoise as GN
from keras.optimizers import SGD, Adam, RMSprop
from keras import backend as K
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.layers import Input, Dense, Dropout, Flatten, GRU, RepeatVector
from keras.models import Model

input_tensor = Input(shape=(1, 3135))

enc = input_tensor
enc = Flatten()(enc)
enc = RepeatVector(30)(enc)
enc = GRU(256, dropout=0.15, recurrent_dropout=0.1, return_sequences=True)(enc)
enc = TD(Dense(3000, activation='relu'))(enc)
enc = Dropout(0.25)(enc)

dec = Bi(GRU(512, dropout=0.30, recurrent_dropout=0.25,
             return_sequences=True))(enc)
dec = TD(Dense(3000, activation='relu'))(dec)
dec = Dropout(0.5)(dec)
dec = TD(Dense(3000, activation='relu'))(dec)
dec = Dropout(0.1)(dec)
decode = TD(Dense(3135, activation='softmax'))(dec)

model = Model(inputs=input_tensor, outputs=decode)
model.compile(loss='categorical_crossentropy', optimizer='adam')

import pickle
import gzip
import numpy as np
import glob
import sys
Example #9
# NOTE: vgg_x and input_tensor below come from a VGG16 feature-extractor
# section that was truncated from this excerpt (compare Example #14).
"""
inputs      = Input(shape=(timesteps, DIM))
encoded     = GRU(512)(inputs)
"""
print(vgg_x.shape)
DIM = 128
timesteps = 50
print(vgg_x.shape)
inputs = RepeatVector(timesteps)(vgg_x)
encoded = LSTM(256)(inputs)
encoder = Model(input_tensor, encoded)
""" encoder側は、基本的にRNNをスタックしない """
timesteps = 50
DIM = 128
x = RepeatVector(timesteps)(encoded)
x = Bi(LSTM(256, return_sequences=True))(x)
#x           = LSTM(512, return_sequences=True)(x)
decoded = TD(Dense(DIM, activation='softmax'))(x)

t2i = Model(input_tensor, decoded)
t2i.compile(optimizer=Adam(), loss='categorical_crossentropy')
"""
0 <keras.engine.topology.InputLayer object at 0x7f9ecfcea4a8>
1 <keras.layers.convolutional.Conv2D object at 0x7f9ece6220f0>
2 <keras.layers.convolutional.Conv2D object at 0x7f9e8deb02e8>
3 <keras.layers.pooling.MaxPooling2D object at 0x7f9e8de4ee10>
4 <keras.layers.convolutional.Conv2D object at 0x7f9e8de58550>
5 <keras.layers.convolutional.Conv2D object at 0x7f9e8de62e10>
6 <keras.layers.pooling.MaxPooling2D object at 0x7f9e8de6bf60>
7 <keras.layers.convolutional.Conv2D object at 0x7f9e8ddfe5c0>
8 <keras.layers.convolutional.Conv2D object at 0x7f9e8de06c50>
import sys
import pickle
import json
import glob
import copy
import os
import re
import time
import concurrent.futures
import threading

from keras.models import Model
from keras.optimizers import Adam
from keras.layers import Input, Dense, Dropout, Flatten, LSTM, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD

WIDTH       = 256
ACTIVATOR   = 'tanh'
DO          = Dropout(0.1)
inputs      = Input( shape=(20, WIDTH) ) 
encoded     = Bi( LSTM(512, kernel_initializer='lecun_uniform', activation=ACTIVATOR, return_sequences=True) )(inputs)
encoded     = TD( Dense(512, kernel_initializer='lecun_uniform', activation=ACTIVATOR) )( encoded )
encoded     = Flatten()( encoded )
encoded     = Dense(512, kernel_initializer='lecun_uniform', activation='linear')( encoded )
encoder     = Model(inputs, encoded)

decoded_1   = Bi( LSTM(512, kernel_initializer='lecun_uniform', activation=ACTIVATOR, return_sequences=True) )( RepeatVector(20)( encoded ) )
decoded_1   = TD( Dense(256) )( decoded_1 )

decoded_2   = Bi( LSTM(511, kernel_initializer='lecun_uniform', activation=ACTIVATOR, return_sequences=True) )( RepeatVector(20)( encoded ) )
decoded_2   = TD( Dense(256) )( decoded_2 )

skipthought = Model( inputs, [decoded_1, decoded_2] )
skipthought.compile( optimizer=Adam(), loss='mean_squared_logarithmic_error' )
  
buff = None
Example #11
from keras.layers.core          import Dropout
from keras.layers.merge         import Concatenate as Concat
from keras.layers.noise         import GaussianNoise as GN
from keras.layers.merge         import Dot,Multiply
import numpy as np
import random
import sys
import pickle
import glob
import copy
import os
import re
import time

from keras.models import Model
from keras.optimizers import RMSprop
from keras.callbacks import LambdaCallback
from keras.layers import Input, Dense, CuDNNLSTM
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD

input_tensor1 = Input(shape=(50, 16))
x           = Bi(CuDNNLSTM(300, return_sequences=True))(input_tensor1)
x           = TD(Dense(500, activation='relu'))(x)
x           = Bi(CuDNNLSTM(300, return_sequences=True))(x)
x           = TD(Dense(500, activation='relu'))(x)
x           = TD(Dense(20, activation='relu'))(x)
decoded     = Dense(1, activation='linear')(x)
print(decoded.shape)
model       = Model(input_tensor1, decoded)
model.compile(RMSprop(lr=0.0001, decay=0.03), loss='mae')

buff = None
now  = time.strftime("%H_%M_%S")
def callback(epoch, logs):
  global buff
  buff = copy.copy(logs)
batch_callback = LambdaCallback(on_epoch_end=lambda batch,logs: callback(batch,logs) )
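batch_callback simply snapshots each epoch's logs into buff via the LambdaCallback. A hypothetical fit call wiring it in (X and y are illustrative arrays of shape (n, 50, 16) and (n, 50, 1)):

model.fit(X, y, batch_size=32, epochs=10, callbacks=[batch_callback])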
Example #12
import keras.backend as K
import numpy as np
import random
import sys
import pickle
import gzip
import copy
import os
import re
from pathlib import Path

from keras.models import Model
from keras.optimizers import Adam
from keras.callbacks import LambdaCallback
from keras.layers import Input, Dense, Flatten, GRU
from keras.layers.wrappers import Bidirectional as Bi

char_index = { char:index for index, char in enumerate(list('abcdefghijklmnopqrstuvwxyz!? ')) }
index_char = { index:char for index, char in enumerate(list('abcdefghijklmnopqrstuvwxyz!? ')) }

inputs      = Input(shape=(4, len(char_index)) ) 
encoded     = Bi(GRU(512, activation='relu', return_sequences=True))(inputs)
encoded     = Flatten()(encoded)
encoded     = Dense(5000, activation='relu')(encoded)
encoded     = Dense(5000, activation='relu')(encoded)
encoded     = Dense(1, activation='linear')(encoded)


spectre = Model(inputs, encoded)
spectre.compile(optimizer=Adam(), loss='mae')

buff = None
def callback(epoch, logs):
  global buff
  buff = copy.copy(logs)
batch_callback = LambdaCallback(on_epoch_end=lambda batch,logs: callback(batch,logs) )
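The input shape (4, len(char_index)) suggests 4-character windows, one-hot encoded with the char_index table above. A hypothetical encoding helper (not part of the source; numpy is imported at the top of this example):

def one_hot_window(text):
    # Turn a 4-character string into a (4, vocab) one-hot matrix.
    x = np.zeros((4, len(char_index)), dtype=np.float32)
    for t, ch in enumerate(text[:4]):
        x[t, char_index[ch]] = 1.0
    return x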
Example #13
from keras import backend as K
from keras.models import Model
from keras.layers import Input, Dense, Lambda, LSTM, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
import glob

timesteps   = 250
input_dim   = 2
latent_dim  = 256
vae_dim     = 16
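# `sampling` is used below but not defined in this excerpt; the following is a
# sketch of the standard VAE reparameterization trick (an assumption, not
# necessarily the source's exact code):
def sampling(args):
    z_mean, z_log_sigma = args
    epsilon = K.random_normal(shape=(K.shape(z_mean)[0], vae_dim),
                              mean=0.0, stddev=1.0)
    return z_mean + K.exp(z_log_sigma) * epsilon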
inputs      = Input(shape=(timesteps, input_dim))
encoded     = LSTM( 128 )(inputs)
z_mean      = Dense( vae_dim )( encoded )
z_log_sigma = Dense( vae_dim )( encoded )
z           = Lambda( sampling, output_shape=( vae_dim ,) )( [z_mean, z_log_sigma] )
encoder     = Model( inputs, encoded )

print( "encoded", encoded.shape )
print( "z", z.shape )
print( "rz", RepeatVector( 4 )( z ) )
decoded     = RepeatVector( timesteps )( z )
decoded     = Bi( LSTM( 128, return_sequences=True) )( decoded )
decoded     = TD( Dense(2, activation='linear') )( decoded )
encoder_decoder = Model(inputs, decoded)
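# No objective is shown in this excerpt. A hedged sketch of the usual
# KL-regularized VAE loss, assuming an MSE reconstruction term
# (illustrative only):
def vae_loss(y_true, y_pred):
    recon = K.mean(K.square(y_true - y_pred))
    kl = -0.5 * K.mean(1.0 + z_log_sigma - K.square(z_mean) - K.exp(z_log_sigma))
    return recon + kl
# encoder_decoder.compile(optimizer='adam', loss=vae_loss)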

# intermediate_dim = 256
# original_dim     = 1024
# x = Input(batch_shape=(batch_size, original_dim))
# h = Dense(intermediate_dim, activation='relu')(x)


def train():
  xss, yss = [], []
  trains = glob.glob("jsons/train_*.json")
  for o in range(batch_size*200):
    xs = [ [0.0]*2  for i in range(250) ]
    ys = [ [0.0]*2  for i in range(250) ]
Example #14
# The top of this excerpt is truncated: input_tensor is the Input over the
# source image (as in Example #7), and the VGG16 call below is reconstructed
# from its surviving keyword arguments.
from keras.applications.vgg16 import VGG16
from keras.layers import Input, Dense, Flatten, LSTM, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.models import Model
from keras.optimizers import Adam
vgg_model = VGG16(include_top=False,
                  weights='imagenet',
                  input_tensor=input_tensor)
vgg_x = vgg_model.layers[-1].output
vgg_x = Flatten()(vgg_x)
vgg_x = Dense(1024)(vgg_x)

DIM = 128
timesteps = 10
inputs = RepeatVector(timesteps)(vgg_x)
encoded = LSTM(1024)(inputs)
encoder = Model(input_tensor, encoded)

timesteps = 50
DIM = 128
x = RepeatVector(timesteps)(encoded)
x = Bi(LSTM(1024, return_sequences=True))(x)
decoded = TD(Dense(DIM, activation='softmax'))(x)

t2i = Model(input_tensor, decoded)
t2i.compile(optimizer=Adam(), loss='categorical_crossentropy')

for i, layer in enumerate(t2i.layers):  # default 15
    print(i, layer)

for layer in t2i.layers[:18]:  # 18 is max of VGG16
    layer.trainable = False
    ...

buff = None

Example #15
import os
import re
import gzip
import json
import copy

from keras.models import Model
from keras.optimizers import Adam
from keras.layers import Input, Dense, Flatten, GRU, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD

VOC_SIZE = 3504
in_timesteps = 300
out_timesteps = 100
inputs = Input(shape=(in_timesteps, VOC_SIZE))
encoded = Dense(512, activation='relu')(inputs)
encoded = Flatten()(encoded)
encoded = Dense(1024, activation='relu')(encoded)
encoder = Model(inputs, encoded)

x = RepeatVector(out_timesteps)(encoded)
x = Bi(GRU(300, return_sequences=True))(x)
x = TD(Dense(VOC_SIZE * 2, activation='relu'))(x)
decoded = TD(Dense(VOC_SIZE, activation='softmax'))(x)

autoencoder = Model(inputs, decoded)
autoencoder.compile(optimizer=Adam(), loss='categorical_crossentropy')

buff = None


def callbacks(epoch, logs):
    global buff
    buff = copy.copy(logs)
    print("epoch", epoch)
    print("logs", logs)
Example #16
from keras.regularizers import l2
from keras.layers.core import Reshape, Dropout
from keras.layers.normalization import BatchNormalization as BN
import keras.backend as K
import numpy as np
import random
import sys
import pickle
import glob
import copy
import os
import re

from keras.models import Model
from keras.optimizers import Adam
from keras.layers import Input, Dense, GRU
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD

timesteps = 100
inputs = Input(shape=(timesteps, 29))
x = Bi(GRU(512, dropout=0.10, recurrent_dropout=0.25,
           return_sequences=True))(inputs)
x = TD(Dense(3000, activation='relu'))(x)
x = Dropout(0.2)(x)
x = TD(Dense(3000, activation='relu'))(x)
x = Dropout(0.2)(x)
x = TD(Dense(29, activation='softmax'))(x)

decript = Model(inputs, x)
decript.compile(optimizer=Adam(), loss='categorical_crossentropy')

buff = None


def callbacks(epoch, logs):
    global buff
    buff = copy.copy(logs)
Example #17
from keras.callbacks import LearningRateScheduler as LRS
import numpy as np
import random
import sys
import glob
import pickle
import re

from keras.models import Model
from keras.optimizers import Adam
from keras.layers import Input, Dense, GRU, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD

WIDTH = 2029
MAXLEN = 31
INPUTLEN = 1000
inputs = Input(shape=(INPUTLEN, ))
#enc         = Dense(2024, activation='linear')( inputs )
#enc         = Dense(1024, activation='tanh')( enc )
repeat = RepeatVector(31)(inputs)
generated = Bi(GRU(256, return_sequences=True))(repeat)
generated = TD(Dense(2049, activation='relu'))(generated)
generated = TD(Dense(2049, activation='softmax'))(generated)
#generator   = Model( inputs, generated )

#generated   = Lambda( lambda x:x*2.0 )(generated)

generator = Model(inputs, generated)
generator.compile(optimizer=Adam(), loss='categorical_crossentropy')


def train_base():
    for ge, name in enumerate(glob.glob('utils/dataset_*.pkl')[:1]):
        dataset = pickle.loads(open(name, 'rb').read())
        xs, ys = dataset
        for e in range(10):
Example #18
import glob
import copy
import os
import re
import time

from keras.models import Model
from keras.optimizers import RMSprop
from keras.layers import Input, Dense, GRU
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD

width = 20
input1 = Input(shape=(width, 1))
x = TD(Dense(1000, activation='linear'))(input1)
x = GRU(300,
        activation='linear',
        recurrent_activation='linear',
        return_sequences=True)(x)
x = Bi(
    GRU(300,
        activation='linear',
        recurrent_activation='linear',
        return_sequences=True))(x)
x = TD(Dense(3000, activation='linear'))(x)
x = TD(Dense(3000, activation='linear'))(x)
output = TD(Dense(1, activation='linear'))(x)

model = Model(input1, output)
model.compile(RMSprop(lr=0.0001, decay=0.03), loss='mae')

buff = None
now = time.strftime("%H_%M_%S")


def callback(epoch, logs):
    global buff
Example #19
import os
import re
import copy
import pickle

from keras.models import Model
from keras.optimizers import Adam
from keras.layers import Input, Dense, Flatten, LSTM, RepeatVector, multiply
from keras.layers.normalization import BatchNormalization as BN
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD


timesteps   = 50
inputs      = Input(shape=(timesteps, 128))
encoded     = LSTM(512)(inputs)
inputs_a    = inputs
inputs_a    = Dense(2048)(inputs_a)
inputs_a    = BN()(inputs_a)
a_vector    = Dense(512, activation='softmax')(Flatten()(inputs_a))
mul         = multiply([encoded, a_vector]) 
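# a_vector is a softmax over the 512 encoded units, so the elementwise multiply
# acts as a simple attention-style gate on the encoding.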
encoder     = Model(inputs, mul)

x           = RepeatVector(timesteps)(mul)
x           = Bi(LSTM(512, return_sequences=True))(x)
decoded     = TD(Dense(128, activation='softmax'))(x)

autoencoder = Model(inputs, decoded)
autoencoder.compile(optimizer=Adam(), loss='categorical_crossentropy')

buff = None
def callbacks(epoch, logs):
  global buff
  buff = copy.copy(logs)
  print("epoch" ,epoch)
  print("logs", logs)

def train():
  c_i = pickle.loads( open("dataset/c_i.pkl", "rb").read() )
  xss = []