def __init__(self):
    """Build the DCSNet encoder.

    Five strided complex 1-D convolutions (each followed by LeakyReLU)
    progressively halve the time axis, then two parallel complex dense
    heads map the flattened features to 1025 outputs each.
    """
    super(DCSNet, self).__init__()
    # One row per conv stage: (in_ch, out_ch, kernel, stride, padding).
    # Shape notes mirror the original author's comments — presumably the
    # running (channels x length) state; confirm against the data pipeline.
    stages = [
        (1, 16, 7, 2, 3),   # 2 x 1025  -> 32 x 513
        (16, 32, 5, 2, 2),  # 32 x 513  -> 64 x 257
        (32, 32, 3, 2, 1),  # 64 x 257  -> 64 x 129
        (32, 64, 3, 2, 1),  # 64 x 129  -> 128 x 65
        (64, 64, 3, 2, 1),  # 128 x 65  -> 128 x 33
    ]
    layers = []
    for c_in, c_out, kernel, stride, pad in stages:
        layers.append(ComplexConv1D(c_in, c_out, kernel, stride, pad, bias=False))
        layers.append(nn.LeakyReLU(0.2, inplace=True))
    self.main = nn.Sequential(*layers)
    # Two parallel complex dense heads over the flattened conv features.
    self.dense1 = ComplexDense(2112, 1025, init_criterion='he')
    self.dense2 = ComplexDense(2112, 1025, init_criterion='he')
def get_shallow_convnet(window_size=4096, channels=2, output_size=84):
    """Build and compile the shallow complex-valued convnet.

    One wide complex conv with heavy striding, average pooling, then two
    complex dense layers; the real part of the sigmoid output is used as
    the multi-label prediction. Compiled with Adam + binary cross-entropy.
    """
    inputs = Input(shape=(window_size, channels))

    hidden = ComplexConv1D(32, 512, strides=16, activation='relu')(inputs)
    hidden = AveragePooling1D(pool_size=4, strides=2)(hidden)
    # Move channels in front of time before flattening.
    hidden = Permute([2, 1])(hidden)
    hidden = Flatten()(hidden)
    hidden = ComplexDense(2048, activation='relu')(hidden)

    # Negative bias init keeps initial sigmoid activations near zero
    # (sparse positives) — presumably deliberate; confirm with training setup.
    predictions = ComplexDense(output_size, activation='sigmoid',
                               bias_initializer=Constant(value=-5))(hidden)
    predictions = GetReal(predictions)

    model = Model(inputs=inputs, outputs=predictions)
    model.compile(optimizer=Adam(lr=1e-4),
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model
def get_deep_convnet(window_size=4096, channels=2, output_size=84):
    """Build and compile the deep complex-valued convnet.

    Six complex conv stages (five of them conv -> complex BN -> ReLU ->
    average pool), then a real-valued dense head with a sigmoid output.
    Compiled with Adam + binary cross-entropy.
    """
    inputs = Input(shape=(window_size, channels))

    def conv_bn_pool(x, filters, kernel, stride):
        # Standard stage: complex conv -> complex BN -> ReLU -> 2x avg pool.
        x = ComplexConv1D(filters, kernel, strides=stride, padding='same',
                          activation='linear',
                          kernel_initializer='complex_independent')(x)
        x = ComplexBN(axis=-1)(x)
        x = keras.layers.Activation('relu')(x)
        return keras.layers.AveragePooling1D(pool_size=2, strides=2)(x)

    x = conv_bn_pool(inputs, 16, 6, 2)
    x = conv_bn_pool(x, 32, 3, 2)
    x = conv_bn_pool(x, 64, 3, 1)
    x = conv_bn_pool(x, 64, 3, 1)
    # Final stage pairs a ReLU-activated conv (no BN) with a standard stage.
    x = ComplexConv1D(128, 3, strides=1, padding='same', activation='relu',
                      kernel_initializer='complex_independent')(x)
    x = conv_bn_pool(x, 128, 3, 1)

    x = keras.layers.Flatten()(x)
    x = keras.layers.Dense(2048, activation='relu',
                           kernel_initializer='glorot_normal')(x)
    # Negative bias init keeps initial sigmoid outputs near zero
    # (sparse positives) — presumably deliberate; confirm with training setup.
    predictions = keras.layers.Dense(
        output_size, activation='sigmoid',
        bias_initializer=keras.initializers.Constant(value=-5))(x)

    model = Model(inputs=inputs, outputs=predictions)
    model.compile(optimizer=keras.optimizers.Adam(lr=1e-4),
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model