Example #1
# `sk` below is the SimNets Keras layer module imported elsewhere in the
# original file; the remaining layers come from keras.layers.
def norm_layer(x):
    # A single softmax-mode Mex spanning the full channel depth yields one
    # normalization value per spatial position.
    norm = sk.Mex(1,
                  blocks=[int(x.shape[-3]), 1, 1],
                  softmax_mode=True,
                  normalize_offsets=False,
                  use_unshared_regions=False,
                  shared_offset_region=[-1],
                  offsets_initializer='zeros',
                  trainable=False)(x)
    # Tile the (1, H, W) term across the channel axis, negate it, and add it
    # to the input, i.e. subtract the normalizer from every channel.
    tile = Reshape((int(x.shape[-2]) * int(x.shape[-1]),))(norm)
    tile = RepeatVector(int(x.shape[-3]))(tile)
    tile = Reshape(x._keras_shape[-3:])(tile)
    tile = Lambda(lambda t: -t)(tile)
    normalized = add([x, tile])
    return normalized, norm
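# Usage sketch (not part of the original example; the input shape below is an
# illustrative assumption): norm_layer expects a channels-first 4-D feature
# map and returns both the normalized map and the normalizer itself.
from keras.layers import Input
from keras.models import Model

inp = Input(shape=(32, 8, 8))          # (channels, height, width)
normalized, norm = norm_layer(inp)
model = Model(inputs=inp, outputs=normalized)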
Example #2
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

model = Sequential()
model.add(InputLayer(input_shape=(3, 32, 32)))
model.add(Conv2D(filters=32, kernel_size=(5, 5), padding='same', use_bias=False,
                 data_format='channels_first', strides=(1, 1)))
model.add(sk.Similarity(32, blocks=[1, 1], strides=[1, 1], similarity_function='L2',
                        normalization_term=True, padding=[1, 1],
                        out_of_bounds_value=np.nan, ignore_nan_input=True))
model.add(
    sk.Mex(32,
           blocks=[1, 3, 3],
           strides=[32, 2, 2],
           softmax_mode=False,
           normalize_offsets=True,
           use_unshared_regions=True,
           unshared_offset_region=[2]))
model.add(
    sk.Mex(10,
           blocks=[1, 16, 16],
           strides=[32, 16, 16],
           softmax_mode=True,
           normalize_offsets=True,
           use_unshared_regions=True,
           unshared_offset_region=[2]))
model.add(Flatten(data_format='channels_first'))
model.summary()
# Initialize the SGD optimizer with Nesterov momentum.
opt = keras.optimizers.SGD(lr=0.01, momentum=0.9, decay=0.0001, nesterov=True)
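# Training sketch (an assumption, not part of the original snippet): x_train
# and x_test are taken to be the image arrays matching y_train / y_test,
# shaped (N, 3, 32, 32) to agree with the channels-first InputLayer above.
model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])
model.fit(x_train, y_train,
          batch_size=64,
          epochs=10,
          validation_data=(x_test, y_test),
          shuffle=True)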
Example #3
    normalization_term=True,
    padding=[2, 2],
    out_of_bounds_value=np.nan,
    ignore_nan_input=True,
    normalization_term_fudge=1e-4,
    weights_initializer=keras.initializers.Constant(value=100))(a)
i = 0
# last_norm=None
# Stack Mex stages (doubling the channel count each time) followed by 2x2 sum
# pooling until the spatial dimensions collapse to 1x1. sum_pooling_layer and
# fixed_dirichlet_init are helpers defined elsewhere in the original file.
while b.shape[-2:] != (1, 1):
    mex_channels *= 2
    unshared = 2 if i < 1 else int(b.shape[-2])
    #b, b_norm = norm_layer(b)
    b = sk.Mex(mex_channels,
               blocks=[int(b.shape[-3]), 1, 1],
               strides=[int(b.shape[-3]), 1, 1],
               softmax_mode=True,
               normalize_offsets=True,
               use_unshared_regions=True,
               unshared_offset_region=[unshared],
               offsets_initializer=fixed_dirichlet_init)(b)
    b = sum_pooling_layer(b, pool_size=(2, 2))
    # b_norm = sum_pooling_layer(b_norm, pool_size=(2, 2))
    # if last_norm is None:
    #     last_norm = b_norm
    # else:
    #     last_norm = sum_pooling_layer(last_norm, pool_size=(2, 2))
    #     last_norm = add([last_norm, b_norm])
    i += 1
#b, b_norm = norm_layer(b)
# Final Mex stage: collapse the remaining channels into num_classes outputs.
b = sk.Mex(num_classes,
           blocks=[mex_channels, 1, 1],
           strides=[mex_channels, 1, 1],
Example #4
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

model = Sequential()
model.add(InputLayer(input_shape=(3, 32, 32)))
model.add(Conv2D(filters=32, kernel_size=(5, 5), padding='same', use_bias=False,
                 data_format='channels_first', strides=(1, 1)))
model.add(sk.Similarity(32, blocks=[1, 1], strides=[1, 1], similarity_function='L2',
                        normalization_term=True, padding=[1, 1],
                        out_of_bounds_value=np.nan, ignore_nan_input=True))
model.add(
    sk.Mex(32,
           blocks=[1, 3, 3],
           strides=[32, 2, 2],
           softmax_mode=False,
           normalize_offsets=True,
           use_unshared_regions=True,
           unshared_offset_region=[2]))
model.add(Conv2D(filters=64, kernel_size=(5, 5), padding='same', use_bias=False,
                 data_format='channels_first', strides=(1, 1)))
model.add(sk.Similarity(64, blocks=[1, 1], strides=[1, 1], similarity_function='L2',
                        normalization_term=True, padding=[1, 1],
                        out_of_bounds_value=np.nan, ignore_nan_input=True))
model.add(
    sk.Mex(64,
           blocks=[1, 3, 3],
           strides=[64, 2, 2],
           softmax_mode=False,
           normalize_offsets=True,
           use_unshared_regions=True,
           unshared_offset_region=[2]))  # spatial size is now w/4 = 8
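# Possible continuation (an assumption; the original snippet is truncated
# here): mirroring Example #2, the head would flatten the channels-first
# features before attaching an optimizer and loss.
model.add(Flatten(data_format='channels_first'))
model.summary()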
Example #5
import numpy as np
import tensorflow as tf
from keras.layers import Input, Lambda, GlobalAveragePooling2D
from keras.models import Model
from keras.callbacks import TensorBoard
# `sk` is the SimNets Keras layer module, imported elsewhere in the original file.

np.random.seed(1)
with tf.device('/gpu:0'):
    data = np.arange(0, 1, 0.001, dtype=np.float32).reshape(-1, 1, 1, 1)
    y = (1 + data).reshape(-1, 1)
    batch_size = 100

    input_shape = (1, 1, 1)

    tf.set_random_seed(1)
    a = Input(shape=(1, 1, 1))

    b = sk.Mex(3,
               blocks=[1, 1, 1],
               strides=[1, 1, 1],
               softmax_mode=False,
               normalize_offsets=False,
               use_unshared_regions=False,
               shared_offset_region=[-1])
    bb = b  # keep a handle to the Mex layer object itself
    b = b(a)
    b = GlobalAveragePooling2D()(b)
    b = Lambda(lambda x: tf.reduce_mean(x, 1, keep_dims=True))(b)
    model = Model(inputs=[a], outputs=[b])

    print(model.summary())
    #import sys
    #sys.exit(0)
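    # Training sketch (an assumption, not part of the original snippet): the
    # toy target is y = 1 + x, so a plain mean-squared-error regression fit
    # suffices here.
    model.compile(optimizer='sgd', loss='mse')
    model.fit(data, y, batch_size=batch_size, epochs=10, verbose=0)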

    cb = TensorBoard(histogram_freq=1,
                     log_dir='/home/elhanani/study/huji-deep/install/logs',
Example #6
           data_format='channels_first',
           use_bias=False)(a)  # note: the strides must not be 1
b = sk.Similarity(sim_channels,
                  blocks=[3, 3],
                  strides=[2, 2],
                  similarity_function='L2',
                  normalization_term=True,
                  padding=[2, 2],
                  out_of_bounds_value=np.nan,
                  ignore_nan_input=True)(b)
while b.shape[-2:] != (1, 1):
    mex_channels *= 2
    b = sk.Mex(mex_channels,
               blocks=[int(b.shape[-3]), 1, 1],
               strides=[int(b.shape[-3]), 1, 1],
               softmax_mode=True,
               normalize_offsets=True,
               use_unshared_regions=True,
               unshared_offset_region=[2])(b)
    b = sum_pooling_layer(b, pool_size=(2, 2))

b = sk.Mex(num_classes,
           blocks=[mex_channels, 1, 1],
           strides=[mex_channels, 1, 1],
           softmax_mode=True,
           normalize_offsets=True,
           use_unshared_regions=True,
           shared_offset_region=[1])(b)
b = Flatten(data_format='channels_first')(b)
model = Model(inputs=[a], outputs=[b])
Example #7
#                  # weights_initializer=tf.constant_initializer(weights['sim_weights']))(a)
#b = a
i = 0
# last_norm=None
bb = None
# Stack Mex stages until the spatial dimensions collapse to 1x1, loading the
# offsets of each stage from the `weights` dict.
while b.shape[-2:] != (1, 1):
    with tf.name_scope('block'):
        i += 1
        mex_channels *= 2
        unshared = 2  # if i < 1 else -1
        #b, b_norm = norm_layer(b)
        l = sk.Mex(mex_channels,
                   blocks=[int(b.shape[-3]), 1, 1],
                   strides=[int(b.shape[-3]), 1, 1],
                   softmax_mode=True,
                   normalize_offsets=True,
                   blocks_out_of_bounds_value=np.nan,
                   use_unshared_regions=True,
                   unshared_offset_region=[unshared],
                   offsets_initializer=tf.constant_initializer(
                       weights['mex' + str(i)]))
        print('Shape:', b.shape[-3])
        if bb is None:
            bb = l
            # Record the constructor arguments of the first Mex stage for
            # later inspection.
            with open('/home/elhanani/tmp/mex_dict.txt', 'w') as f:
                d = dict(ninstances=mex_channels,
                         blocks=[int(b.shape[-3]), 1, 1],
                         strides=[int(b.shape[-3]), 1, 1],
                         softmax_mode=True,
                         normalize_offsets=True,
                         blocks_out_of_bounds_value=np.nan,
                         use_unshared_regions=True,