# TensorFlow 1.x graph-mode model definitions for binarized networks with
# shift-based batch normalization.
import tensorflow as tf

import layers  # project-local module providing binaryConv2d, binaryDense, and the shift-based batch-norm ops (import path assumed)


def binary_cifar10_sbn_small(input, training=True):
    """Reduced-width (4-filter / 4-unit) CIFAR-10 binary convnet.

    The `_small` suffix distinguishes this debug-sized variant from the
    full-size binary_cifar10_sbn defined later in this file, which it would
    otherwise shadow.
    """
    out = layers.binaryConv2d(input, 4, [3, 3], [1, 1], padding='VALID', use_bias=True,
                              binarize_input=False, name='bc_conv2d_1_layer1')
    out = layers.spatial_shift_batch_norm(out, training=training, name='batchNormalization1_layer1')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 4, [3, 3], [1, 1], padding='SAME', use_bias=True,
                              name='bnn_conv2d_1_layer2')
    out = tf.layers.max_pooling2d(out, [2, 2], [2, 2])
    out = layers.spatial_shift_batch_norm(out, training=training, name='batchNormalization2_layer2')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 4, [3, 3], [1, 1], padding='SAME', use_bias=True,
                              name='bnn_conv2d_2_layer3')
    out = layers.spatial_shift_batch_norm(out, training=training, name='batchNormalization3_layer3')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 4, [3, 3], [1, 1], padding='SAME', use_bias=True,
                              name='bnn_conv2d_3_layer4')
    out = tf.layers.max_pooling2d(out, [2, 2], [2, 2])
    out = layers.spatial_shift_batch_norm(out, training=training, name='batchNormalization4_layer4')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 4, [3, 3], [1, 1], padding='SAME', use_bias=True,
                              name='bnn_conv2d_4_layer5')
    out = layers.spatial_shift_batch_norm(out, training=training, name='batchNormalization5_layer5')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 4, [3, 3], [1, 1], padding='SAME', use_bias=True,
                              name='bnn_conv2d_5_layer6')
    out = tf.layers.max_pooling2d(out, [2, 2], [2, 2])
    out = layers.spatial_shift_batch_norm(out, training=training, name='batchNormalization6_layer6')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryDense(out, 4, use_bias=True, name='binary_dense_1_layer7')
    out = layers.shift_batch_norm(out, training=training, name='batchNormalization7_layer7')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryDense(out, 4, use_bias=True, name='binary_dense_2_layer8')
    out = layers.shift_batch_norm(out, training=training, name='batchNormalization8_layer8')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryDense(out, 10, name='binary_dense_3_layer9')
    output = layers.shift_batch_norm(out, training=training, name='batchNormalization9_layer9')
    return input, output
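# A minimal sketch of the sign/straight-through-estimator pattern that binary
# layers in the BinaryNet literature typically build on: the forward pass uses
# sign(x) while the backward pass passes the gradient straight through, and the
# tf.clip_by_value(..., -1, 1) calls in the models above and below keep that
# pass-through gradient bounded (hard tanh). `ste_binarize` is a hypothetical
# helper shown for illustration only; it is not part of this project's
# layers module.
def ste_binarize(x):
    # Forward value is tf.sign(x); the sign term is wrapped in stop_gradient,
    # so only the identity term `x` contributes to the backward pass.
    return x + tf.stop_gradient(tf.sign(x) - x)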
def binary_mnist_sbn_small(input, training=True):
    """Reduced-width (32-unit) MNIST binary MLP.

    The `_small` suffix distinguishes this debug-sized variant from the
    full-size binary_mnist_sbn defined next in this file, which it would
    otherwise shadow.
    """
    fc1 = layers.binaryDense(input, 32, activation=None, name="binarydense1_layer1", binarize_input=False)
    bn1 = layers.shift_batch_norm(fc1, training=training, name="batchNormalization1_layer1")
    ac1 = tf.clip_by_value(bn1, -1, 1)

    fc2 = layers.binaryDense(ac1, 32, activation=None, name="binarydense2_layer2")
    bn2 = layers.shift_batch_norm(fc2, training=training, name="batchNormalization2_layer2")
    ac2 = tf.clip_by_value(bn2, -1, 1)

    fc3 = layers.binaryDense(ac2, 32, activation=None, name="binarydense3_layer3")
    bn3 = layers.shift_batch_norm(fc3, training=training, name="batchNormalization3_layer3")
    ac3 = tf.clip_by_value(bn3, -1, 1)

    fc4 = layers.binaryDense(ac3, 10, activation=None, name="binarydense4_layer4")
    output = layers.shift_batch_norm(fc4, training=training, name="batchNormalization4_layer4")
    return input, output
def binary_mnist_sbn(input, training=True):
    """MNIST binary MLP: three 2048-unit binary dense layers plus a 10-unit
    output layer, each followed by shift-based batch norm and hard-tanh
    clipping to [-1, 1]."""
    fc1 = layers.binaryDense(input, 2048, activation=None, name="binarydense1", binarize_input=False)
    bn1 = layers.shift_batch_norm(fc1, training=training, name="batch_norm1")
    ac1 = tf.clip_by_value(bn1, -1, 1)

    fc2 = layers.binaryDense(ac1, 2048, activation=None, name="binarydense2")
    bn2 = layers.shift_batch_norm(fc2, training=training, name="batch_norm2")
    ac2 = tf.clip_by_value(bn2, -1, 1)

    fc3 = layers.binaryDense(ac2, 2048, activation=None, name="binarydense3")
    bn3 = layers.shift_batch_norm(fc3, training=training, name="batch_norm3")
    ac3 = tf.clip_by_value(bn3, -1, 1)

    fc4 = layers.binaryDense(ac3, 10, activation=None, name="binarydense4")
    output = layers.shift_batch_norm(fc4, training=training, name="batch_norm4")
    return input, output
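# A minimal usage sketch for the full-size MNIST MLP above. The flattened
# 784-float placeholder shape, label placeholder, and softmax cross-entropy
# loss are assumptions about the surrounding training script, not part of
# this file; the helper is never called at import time.
def _binary_mnist_sbn_usage_sketch():
    x = tf.placeholder(tf.float32, [None, 784], name='mnist_images')
    y = tf.placeholder(tf.float32, [None, 10], name='mnist_labels')
    _, logits = binary_mnist_sbn(x, training=True)
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits_v2(labels=y, logits=logits))
    return x, y, logits, loss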
def binary_cifar10_sbn(input, training=True):
    """VGG-style CIFAR-10 binary convnet: six binary conv layers
    (128/128/256/256/512/512 filters) with max pooling after every second
    conv, followed by two 1024-unit binary dense layers and a 10-unit output
    layer, each with shift-based batch norm and hard-tanh clipping."""
    out = layers.binaryConv2d(input, 128, [3, 3], [1, 1], padding='VALID',
                              binarize_input=False, name='bc_conv2d_1')
    out = layers.spatial_shift_batch_norm(out, training=training, name='shift_batch_norm_1')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 128, [3, 3], [1, 1], padding='SAME', name='bnn_conv2d_1')
    out = tf.layers.max_pooling2d(out, [2, 2], [2, 2])
    out = layers.spatial_shift_batch_norm(out, training=training, name='shift_batch_norm_2')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 256, [3, 3], [1, 1], padding='SAME', name='bnn_conv2d_2')
    out = layers.spatial_shift_batch_norm(out, training=training, name='shift_batch_norm_3')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 256, [3, 3], [1, 1], padding='SAME', name='bnn_conv2d_3')
    out = tf.layers.max_pooling2d(out, [2, 2], [2, 2])
    out = layers.spatial_shift_batch_norm(out, training=training, name='shift_batch_norm_4')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 512, [3, 3], [1, 1], padding='SAME', name='bnn_conv2d_4')
    out = layers.spatial_shift_batch_norm(out, training=training, name='shift_batch_norm_5')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryConv2d(out, 512, [3, 3], [1, 1], padding='SAME', name='bnn_conv2d_5')
    out = tf.layers.max_pooling2d(out, [2, 2], [2, 2])
    out = layers.spatial_shift_batch_norm(out, training=training, name='shift_batch_norm_6')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryDense(out, 1024, name='binary_dense_1')
    out = layers.shift_batch_norm(out, training=training, name='shift_batch_norm_7')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryDense(out, 1024, name='binary_dense_2')
    out = layers.shift_batch_norm(out, training=training, name='shift_batch_norm_8')
    out = tf.clip_by_value(out, -1, 1)

    out = layers.binaryDense(out, 10, name='binary_dense_3')
    output = layers.shift_batch_norm(out, training=training, name='shift_batch_norm_9')
    return input, output
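# A minimal smoke-test sketch for the CIFAR-10 convnet above, assuming NHWC
# batches of shape [None, 32, 32, 3]; the placeholder name and shape are
# assumptions, and the block only runs when this file is executed directly,
# so nothing is built on import.
if __name__ == '__main__':
    images = tf.placeholder(tf.float32, [None, 32, 32, 3], name='cifar10_images')
    _, logits = binary_cifar10_sbn(images, training=False)
    # Printing the graph tensor shows its static shape, expected to be (?, 10).
    print(logits)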