Example #1
def rnn_lstm(seq_length=3, num_outputs=2, image_shape=(120,160,3)):

    from keras.models import Sequential
    from keras.layers import Dense, Convolution2D, MaxPooling2D
    from keras.layers import Dropout, Flatten, Cropping2D
    from keras.layers import LSTM
    from keras.layers.wrappers import TimeDistributed as TD

    img_seq_shape = (seq_length,) + image_shape
    
    x = Sequential()
    x.add(TD(Cropping2D(cropping=((60,0), (0,0))), input_shape=img_seq_shape )) #trim 60 pixels off top
    x.add(TD(Convolution2D(24, (5,5), strides=(2,2), activation='relu')))
    x.add(TD(Convolution2D(32, (5,5), strides=(2,2), activation='relu')))
    x.add(TD(Convolution2D(32, (3,3), strides=(2,2), activation='relu')))
    x.add(TD(Convolution2D(32, (3,3), strides=(1,1), activation='relu')))
    x.add(TD(MaxPooling2D(pool_size=(2, 2))))
    x.add(TD(Flatten(name='flattened')))
    x.add(TD(Dense(100, activation='relu')))
    x.add(TD(Dropout(.1)))
      
    x.add(LSTM(128, return_sequences=True, name="LSTM_seq"))
    x.add(Dropout(.1))
    x.add(LSTM(128, return_sequences=False, name="LSTM_out"))
    x.add(Dropout(.1))
    x.add(Dense(128, activation='relu'))
    x.add(Dropout(.1))
    x.add(Dense(64, activation='relu'))
    x.add(Dense(10, activation='relu'))
    x.add(Dense(num_outputs, activation='linear', name='model_outputs'))
    
    return x
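
A minimal usage sketch (an editor's assumption, not part of the original snippet): build and compile the model for a two-output regression, then inspect the expected input shape.

model = rnn_lstm(seq_length=3, num_outputs=2)
model.compile(optimizer='adam', loss='mse')
model.summary()   # first layer expects batches shaped (batch, 3, 120, 160, 3)
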
Example #2
from keras.models import Model
from keras.layers import Input, Dense, GRU
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.optimizers import RMSprop

width = 20
input1 = Input(shape=(width, 1))
x = TD(Dense(1000, activation='linear'))(input1)
x = GRU(300,
        activation='linear',
        recurrent_activation='linear',
        return_sequences=True)(x)
x = Bi(
    GRU(300,
        activation='linear',
        recurrent_activation='linear',
        return_sequences=True))(x)
x = TD(Dense(3000, activation='linear'))(x)
x = TD(Dense(3000, activation='linear'))(x)
output = TD(Dense(1, activation='linear'))(x)

model = Model(input1, output)
model.compile(RMSprop(lr=0.0001, decay=0.03), loss='mae')
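
A quick smoke test for the model above, using random arrays with the declared shapes; the batch size and epoch count are arbitrary assumptions.

import numpy as np

x_dummy = np.random.rand(8, width, 1)   # (batch, timesteps=20, features=1)
y_dummy = np.random.rand(8, width, 1)   # TD(Dense(1)) keeps the time axis
model.fit(x_dummy, y_dummy, epochs=1, batch_size=4)
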
Example #3
    from keras.models import Sequential
    from keras.layers import Dense, Convolution2D, MaxPooling2D
    from keras.layers import Dropout, Flatten, Cropping2D
    from keras.layers import LSTM
    from keras.layers.wrappers import TimeDistributed as TD

    img_seq_shape = (seq_length,) + image_shape
    
    x = Sequential()
    x.add(TD(Cropping2D(cropping=((60,0), (0,0))), input_shape=img_seq_shape )) #trim 60 pixels off top
    x.add(TD(Convolution2D(24, (5,5), strides=(2,2), activation='relu')))
    x.add(TD(Convolution2D(32, (5,5), strides=(2,2), activation='relu')))
    x.add(TD(Convolution2D(32, (3,3), strides=(2,2), activation='relu')))
    x.add(TD(Convolution2D(32, (3,3), strides=(1,1), activation='relu')))
    x.add(TD(MaxPooling2D(pool_size=(2, 2))))
    x.add(TD(Flatten(name='flattened')))
    x.add(TD(Dense(100, activation='relu')))
    x.add(TD(Dropout(.1)))
      
    x.add(LSTM(128, return_sequences=True, name="LSTM_seq"))
    x.add(Dropout(.1))
    x.add(LSTM(128, return_sequences=False, name="LSTM_out"))
    x.add(Dropout(.1))
    x.add(Dense(128, activation='relu'))
    x.add(Dropout(.1))
Example #4
import copy
import time

from keras.models import Model
from keras.layers import Input, Dense, GRU, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.optimizers import Adam

inputs      = Input( shape=(100,256) ) 
encoded     = Bi( GRU(300, activation='relu', return_sequences=False, dropout=0.1, recurrent_dropout=0.1) )( inputs )
encoded     = Dense(2012, activation='relu')( encoded )
encoded     = Dense(2012, activation='relu')( encoded )
encoded     = Dense(1012, activation='tanh')( encoded )
encoder     = Model(inputs, encoded)

decoded_1   = Bi( GRU(300, activation='relu', dropout=0.1, recurrent_dropout=0.1, return_sequences=True) )( RepeatVector(100)( encoded ) )
decoded_1   = TD( Dense(2024, activation='relu') )( decoded_1 )
decoded_1   = TD( Dense(2024, activation='relu') )( decoded_1 )
decoded_1   = TD( Dense(256, activation='linear') )( decoded_1 )

decoded_2   = Bi( GRU(300, activation='relu', dropout=0.1, recurrent_dropout=0.1, return_sequences=True) )( RepeatVector(100)( encoded ) )
decoded_2   = TD( Dense(2024, activation='relu') )( decoded_2 )
decoded_2   = TD( Dense(2024, activation='relu') )( decoded_2 )
decoded_2   = TD( Dense(256, activation='linear') )( decoded_2 )

skipthought = Model( inputs, [decoded_1, decoded_2] )
skipthought.compile( optimizer=Adam(), loss='mse' )
  
buff = None
now  = time.strftime("%H_%M_%S")
def callback(epoch, logs):
  global buff
  buff = copy.copy(logs)
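
A sketch of training the skip-thought model above with its two decoder targets; the random arrays and the LambdaCallback wiring are editor assumptions that follow the declared Input/RepeatVector shapes.

import numpy as np
from keras.callbacks import LambdaCallback

x      = np.random.rand(16, 100, 256)
y_prev = np.random.rand(16, 100, 256)   # one target per decoder branch
y_next = np.random.rand(16, 100, 256)
cb = LambdaCallback(on_epoch_end=callback)
skipthought.fit(x, [y_prev, y_next], epochs=1, callbacks=[cb])
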
Example #5

from keras.models import Model
from keras.layers import Input, Dense, Dropout, Flatten, GRU, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD

input_tensor = Input(shape=(1, 3135))

enc = input_tensor
enc = Flatten()(enc)
enc = RepeatVector(30)(enc)
enc = GRU(256, dropout=0.15, recurrent_dropout=0.1, return_sequences=True)(enc)
enc = TD(Dense(3000, activation='relu'))(enc)
enc = Dropout(0.25)(enc)

dec = Bi(GRU(512, dropout=0.30, recurrent_dropout=0.25,
             return_sequences=True))(enc)
dec = TD(Dense(3000, activation='relu'))(dec)
dec = Dropout(0.5)(dec)
dec = TD(Dense(3000, activation='relu'))(dec)
dec = Dropout(0.1)(dec)
decode = TD(Dense(3135, activation='softmax'))(dec)

model = Model(inputs=input_tensor, outputs=decode)
model.compile(loss='categorical_crossentropy', optimizer='adam')

Example #6
# `inputs`, `input_tensor`, and `vgg_x` come from context elided in this excerpt
encoded     = GRU(512)(inputs)
"""
print(vgg_x.shape)
DIM = 128
timesteps = 50
print(vgg_x.shape)
inputs = RepeatVector(timesteps)(vgg_x)
encoded = LSTM(256)(inputs)
encoder = Model(input_tensor, encoded)
""" encoder側は、基本的にRNNをスタックしない """
timesteps = 50
DIM = 128
x = RepeatVector(timesteps)(encoded)
x = Bi(LSTM(256, return_sequences=True))(x)
#x           = LSTM(512, return_sequences=True)(x)
decoded = TD(Dense(DIM, activation='softmax'))(x)

t2i = Model(input_tensor, decoded)
t2i.compile(optimizer=Adam(), loss='categorical_crossentropy')
"""
0 <keras.engine.topology.InputLayer object at 0x7f9ecfcea4a8>
1 <keras.layers.convolutional.Conv2D object at 0x7f9ece6220f0>
2 <keras.layers.convolutional.Conv2D object at 0x7f9e8deb02e8>
3 <keras.layers.pooling.MaxPooling2D object at 0x7f9e8de4ee10>
4 <keras.layers.convolutional.Conv2D object at 0x7f9e8de58550>
5 <keras.layers.convolutional.Conv2D object at 0x7f9e8de62e10>
6 <keras.layers.pooling.MaxPooling2D object at 0x7f9e8de6bf60>
7 <keras.layers.convolutional.Conv2D object at 0x7f9e8ddfe5c0>
8 <keras.layers.convolutional.Conv2D object at 0x7f9e8de06c50>
9 <keras.layers.convolutional.Conv2D object at 0x7f9e8de0dfd0>
10 <keras.layers.pooling.MaxPooling2D object at 0x7f9e8de20cc0>
Example #7

import time

from keras.models import Model
from keras.layers import Input, Dense, Dropout, Flatten, LSTM, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.optimizers import Adam

WIDTH       = 256
ACTIVATOR   = 'tanh'
DO          = Dropout(0.1)
inputs      = Input( shape=(20, WIDTH) ) 
encoded     = Bi( LSTM(512, kernel_initializer='lecun_uniform', activation=ACTIVATOR, return_sequences=True) )(inputs)
encoded     = TD( Dense(512, kernel_initializer='lecun_uniform', activation=ACTIVATOR) )( encoded )
encoded     = Flatten()( encoded )
encoded     = Dense(512, kernel_initializer='lecun_uniform', activation='linear')( encoded )
encoder     = Model(inputs, encoded)

decoded_1   = Bi( LSTM(512, kernel_initializer='lecun_uniform', activation=ACTIVATOR, return_sequences=True) )( RepeatVector(20)( encoded ) )
decoded_1   = TD( Dense(256) )( decoded_1 )

decoded_2   = Bi( LSTM(512, kernel_initializer='lecun_uniform', activation=ACTIVATOR, return_sequences=True) )( RepeatVector(20)( encoded ) )
decoded_2   = TD( Dense(256) )( decoded_2 )

skipthought = Model( inputs, [decoded_1, decoded_2] )
skipthought.compile( optimizer=Adam(), loss='mean_squared_logarithmic_error' )
  
buff = None
now  = time.strftime("%H_%M_%S")
Example #8
import copy

from keras.models import Model
from keras.layers import Input, Dense, Flatten, GRU, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.optimizers import Adam

VOC_SIZE = 3504
in_timesteps = 300
out_timesteps = 100
inputs = Input(shape=(in_timesteps, VOC_SIZE))
encoded = Dense(512, activation='relu')(inputs)
encoded = Flatten()(encoded)
encoded = Dense(1024, activation='relu')(encoded)
encoder = Model(inputs, encoded)

x = RepeatVector(out_timesteps)(encoded)
x = Bi(GRU(300, return_sequences=True))(x)
x = TD(Dense(VOC_SIZE * 2, activation='relu'))(x)
decoded = TD(Dense(VOC_SIZE, activation='softmax'))(x)

autoencoder = Model(inputs, decoded)
autoencoder.compile(optimizer=Adam(), loss='categorical_crossentropy')

buff = None


def callbacks(epoch, logs):
    global buff
    buff = copy.copy(logs)
    print("epoch", epoch)
    print("logs", logs)

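To actually run the callbacks function above it has to be wrapped in a LambdaCallback, as Example #9 does; the one-hot placeholder data here is an assumption.

import numpy as np
from keras.callbacks import LambdaCallback

x = np.eye(VOC_SIZE)[np.random.randint(0, VOC_SIZE, (4, in_timesteps))]
y = np.eye(VOC_SIZE)[np.random.randint(0, VOC_SIZE, (4, out_timesteps))]
cb = LambdaCallback(on_epoch_end=callbacks)
autoencoder.fit(x, y, epochs=1, callbacks=[cb])
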
Example #9
import copy
import time

from keras.models import Model
from keras.layers import Input, Dense, CuDNNLSTM
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.optimizers import RMSprop
from keras.callbacks import LambdaCallback

input_tensor1 = Input(shape=(50, 16))
x           = Bi(CuDNNLSTM(300, return_sequences=True))(input_tensor1)
x           = TD(Dense(500, activation='relu'))(x)
x           = Bi(CuDNNLSTM(300, return_sequences=True))(x)
x           = TD(Dense(500, activation='relu'))(x)
x           = TD(Dense(20, activation='relu'))(x)
decoded     = Dense(1, activation='linear')(x)
print(decoded.shape)
model       = Model(input_tensor1, decoded)
model.compile(RMSprop(lr=0.0001, decay=0.03), loss='mae')

buff = None
now  = time.strftime("%H_%M_%S")
def callback(epoch, logs):
  global buff
  buff = copy.copy(logs)
batch_callback = LambdaCallback(on_epoch_end=lambda batch,logs: callback(batch,logs) )

Example #10

import copy

from keras.models import Model
from keras.layers import Input, Dense, Dropout, GRU
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.optimizers import Adam

timesteps = 100
inputs = Input(shape=(timesteps, 29))
x = Bi(GRU(512, dropout=0.10, recurrent_dropout=0.25,
           return_sequences=True))(inputs)
x = TD(Dense(3000, activation='relu'))(x)
x = Dropout(0.2)(x)
x = TD(Dense(3000, activation='relu'))(x)
x = Dropout(0.2)(x)
x = TD(Dense(29, activation='softmax'))(x)

decrypt = Model(inputs, x)
decrypt.compile(optimizer=Adam(), loss='categorical_crossentropy')

buff = None


def callbacks(epoch, logs):
    global buff
    buff = copy.copy(logs)
    print("epoch", epoch)

Example #11

import glob
import pickle

from keras.models import Model
from keras.layers import Input, Dense, GRU, RepeatVector
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.optimizers import Adam

WIDTH = 2029
MAXLEN = 31
INPUTLEN = 1000
inputs = Input(shape=(INPUTLEN, ))
#enc         = Dense(2024, activation='linear')( inputs )
#enc         = Dense(1024, activation='tanh')( enc )
repeat = RepeatVector(MAXLEN)(inputs)
generated = Bi(GRU(256, return_sequences=True))(repeat)
generated = TD(Dense(2049, activation='relu'))(generated)
generated = TD(Dense(2049, activation='softmax'))(generated)
#generator   = Model( inputs, generated )

#generated   = Lambda( lambda x:x*2.0 )(generated)

generator = Model(inputs, generated)
generator.compile(optimizer=Adam(), loss='categorical_crossentropy')


def train_base():
    for ge, name in enumerate(glob.glob('utils/dataset_*.pkl')[:1]):
        dataset = pickle.loads(open(name, 'rb').read())
        xs, ys = dataset
        for e in range(10):
            generator.fit(xs, ys, epochs=50)
Example #12
# compile and train the model using the generator function
# (`generator`, `train_samples`, `validation_samples`, `seq_length`, and the
# Keras layer imports are defined elsewhere in the original project)
batch_size = 32
n_epoch = 2
batch_size_value = batch_size

train_generator = generator(train_samples, batch_size=batch_size_value, aug=0)
validation_generator = generator(validation_samples, batch_size=batch_size_value, aug=0)

img_shape=(180,320,3)
img_seq_shape = (seq_length,) + img_shape
# print(img_seq_shape)
#img_in = Input(batch_shape = img_seq_shape, name='img_in')

model = Sequential()
# trim image to only see section with road
model.add(TD(Cropping2D(cropping=((75,0), (0,0))), input_shape=img_seq_shape))
#LSTM model
model.add(TD(Convolution2D(24, (5, 5), activation="relu", name="conv_1", strides=(2, 2))))
model.add(TD(Convolution2D(36, (5, 5), activation="relu", name="conv_2", strides=(2, 2))))
model.add(TD(Convolution2D(48, (5, 5), activation="relu", name="conv_3", strides=(2, 2))))
#model.add(SpatialDropout2D(.5, dim_ordering='default'))
model.add(TD(Convolution2D(64, (3, 3), activation="relu", name="conv_4", strides=(2, 2))))
model.add(TD(Convolution2D(64, (3, 3), activation="relu", name="conv_5", strides=(1, 1))))

model.add(TD(MaxPooling2D(pool_size=(2, 2))))

model.add(TD(Flatten(name='flattened')))

model.add(TD(Dense(100, activation='relu')))
model.add(TD(Dropout(.1)))

Example #13

timesteps   = 250   # assumed: the training sequences built below are length 250
input_dim   = 2
latent_dim  = 256
vae_dim     = 16
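
# `sampling` is not defined in this excerpt; a standard VAE reparameterization
# helper is assumed here as a sketch (z = mean + sigma * epsilon):
from keras import backend as K

def sampling(args):
    z_mean, z_log_sigma = args
    epsilon = K.random_normal(shape=(K.shape(z_mean)[0], vae_dim))
    return z_mean + K.exp(z_log_sigma) * epsilon
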
inputs      = Input(shape=(timesteps, input_dim))
encoded     = LSTM( 128 )(inputs)
z_mean      = Dense( vae_dim )( encoded )
z_log_sigma = Dense( vae_dim )( encoded )
z           = Lambda( sampling, output_shape=( vae_dim ,) )( [z_mean, z_log_sigma] )
encoder     = Model( inputs, encoded )

print( "encoded", encoded.shape )
print( "z", z.shape )
print( "rz", RepeatVector( 4 )( z ) )
decoded     = RepeatVector( timesteps )( z )
decoded     = Bi( LSTM( 128, return_sequences=True) )( decoded )
decoded     = TD( Dense(2, activation='linear') )( decoded )
encoder_decoder = Model(inputs, decoded)
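
# The excerpt builds z_mean/z_log_sigma but never attaches a VAE objective; a
# standard reconstruction + KL loss is sketched here as an assumption, not the
# original author's code:
def vae_loss(y_true, y_pred):
    reconstruction = K.mean(K.square(y_true - y_pred))
    kl = -0.5 * K.mean(1.0 + z_log_sigma - K.square(z_mean) - K.exp(z_log_sigma))
    return reconstruction + kl

encoder_decoder.compile(optimizer='adam', loss=vae_loss)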

# intermediate_dim = 256
# original_dim     = 1024
# x = Input(batch_shape=(batch_size, original_dim))
# h = Dense(intermediate_dim, activation='relu')(x)


def train():
  xss, yss = [], []
  trains = glob.glob("jsons/train_*.json")
  for o in range(batch_size*200):
    xs = [ [0.0]*2  for i in range(250) ]
    ys = [ [0.0]*2  for i in range(250) ]
    xjson = json.loads( open(trains[o]).read() )

Example #14

import copy
import pickle

from keras.models import Model
from keras.layers import Input, Dense, Flatten, LSTM, RepeatVector, multiply
from keras.layers.normalization import BatchNormalization as BN
from keras.layers.wrappers import Bidirectional as Bi
from keras.layers.wrappers import TimeDistributed as TD
from keras.optimizers import Adam

timesteps   = 50
inputs      = Input(shape=(timesteps, 128))
encoded     = LSTM(512)(inputs)
inputs_a    = inputs
inputs_a    = Dense(2048)(inputs_a)
inputs_a    = BN()(inputs_a)
a_vector    = Dense(512, activation='softmax')(Flatten()(inputs_a))
mul         = multiply([encoded, a_vector]) 
encoder     = Model(inputs, mul)

x           = RepeatVector(timesteps)(mul)
x           = Bi(LSTM(512, return_sequences=True))(x)
decoded     = TD(Dense(128, activation='softmax'))(x)

autoencoder = Model(inputs, decoded)
autoencoder.compile(optimizer=Adam(), loss='categorical_crossentropy')

buff = None
def callbacks(epoch, logs):
  global buff
  buff = copy.copy(logs)
  print("epoch" ,epoch)
  print("logs", logs)

def train():
  c_i = pickle.loads( open("dataset/c_i.pkl", "rb").read() )
  xss = []
  yss = []