def __init__(self, mi, fm_sizes, activation=tf.nn.relu):
    # conv1, conv2, conv3
    # note: # feature maps in the shortcut = # feature maps out of conv3
    assert(len(fm_sizes) == 3)

    # note: kernel size in 2nd conv is always 3
    #       so we won't bother including it as an arg

    self.session = None
    self.f = activation  # honor the activation argument instead of hard-coding relu
    
    # init main branch
    # Conv -> BN -> F() ---> Conv -> BN -> F() ---> Conv -> BN
    self.conv1 = ConvLayer(1, mi, fm_sizes[0], 1)
    self.bn1   = BatchNormLayer(fm_sizes[0])
    self.conv2 = ConvLayer(3, fm_sizes[0], fm_sizes[1], 1, 'SAME')
    self.bn2   = BatchNormLayer(fm_sizes[1])
    self.conv3 = ConvLayer(1, fm_sizes[1], fm_sizes[2], 1)
    self.bn3   = BatchNormLayer(fm_sizes[2])

    # in case needed later
    self.layers = [
      self.conv1, self.bn1,
      self.conv2, self.bn2,
      self.conv3, self.bn3,
    ]

    # this will not be used when the input is
    # passed in from a previous layer
    self.input_ = tf.placeholder(tf.float32, shape=(1, 224, 224, mi))
    self.output = self.forward(self.input_)
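
The `forward` called in the last line is not shown in this snippet; the complete examples further down define it as the main branch followed by the identity shortcut. For reference, a minimal version consistent with the `Conv -> BN -> F()` comment above:

def forward(self, X):
    # main branch: Conv -> BN -> F() twice, then a final Conv -> BN
    FX = self.conv1.forward(X)
    FX = self.bn1.forward(FX)
    FX = self.f(FX)
    FX = self.conv2.forward(FX)
    FX = self.bn2.forward(FX)
    FX = self.f(FX)
    FX = self.conv3.forward(FX)
    FX = self.bn3.forward(FX)
    # identity shortcut: add the unmodified input, then activate
    return self.f(FX + X)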
Example #2
 def __init__(self):
     self.layers = [
         # before conv block
         ConvLayer(d=7, mi=3, mo=64, stride=2, padding='SAME'),
         BatchNormLayer(64),
         ReLULayer(),
         MaxPoolLayer(dim=3),
         # conv block
         ConvBlock(mi=64, fm_sizes=[64, 64, 256], stride=1),
     ]
     self.input_ = tf.placeholder(tf.float32, shape=(None, 224, 224, 3))
     self.output = self.forward(self.input_)
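
The `forward` here is presumably just a sequential pass over `self.layers`; a minimal sketch, assuming each layer exposes a `forward` method as in the identity-block examples:

def forward(self, X):
    # feed the output of each layer into the next
    for layer in self.layers:
        X = layer.forward(X)
    return X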
 def __init__(self):
     self.layers = [
         # before conv block
         ConvLayer(d=7, mi=3, mo=64, stride=2, padding='SAME'),
         BatchNormLayer(64),
         ReLULayer(),
         MaxPoolLayer(dim=3),
         # conv block
         ConvBlock(mi=64, fm_sizes=[64, 64, 256], stride=1),
     ]
     self.input_ = keras.Input(shape=(224, 224, 3), dtype=tf.float32)
     self.createModel_(self.input_)
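
`createModel_` is not shown here; a hypothetical sketch of what it might do, chaining the layers over the symbolic `keras.Input` and wrapping the result in a `keras.Model` (the method body and the `self.model` attribute are assumptions, not the original code):

def createModel_(self, X):
    # chain each layer over the symbolic input tensor
    output = X
    for layer in self.layers:
        output = layer(output)
    # wrap the input/output tensors in a Model (hypothetical attribute name)
    self.model = keras.Model(inputs=X, outputs=output)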
Example #4
    def __init__(self, mi, fm_sizes, activation=tf.nn.relu):
        # conv1, conv2, conv3
        # note: # feature maps in the shortcut = # feature maps out of conv3
        assert (len(fm_sizes) == 3)

        # note: kernel size in 2nd conv is always 3
        #       so we won't bother including it as an arg

        self.session = None
        self.f = activation

        # init main branch
        # Conv -> BN -> F() ---> Conv -> BN -> F() ---> Conv -> BN
        self.conv1 = ConvLayer(1, mi, fm_sizes[0], 1)
        self.bn1 = BatchNormLayer(fm_sizes[0])
        self.conv2 = ConvLayer(3, fm_sizes[0], fm_sizes[1], 1, 'SAME')
        self.bn2 = BatchNormLayer(fm_sizes[1])
        self.conv3 = ConvLayer(1, fm_sizes[1], fm_sizes[2], 1)
        self.bn3 = BatchNormLayer(fm_sizes[2])

        # in case needed later
        self.layers = [
            self.conv1,
            self.bn1,
            self.conv2,
            self.bn2,
            self.conv3,
            self.bn3,
        ]

        # this will not be used when the input is
        # passed in from a previous layer
        self.input_ = tf.placeholder(tf.float32, shape=(1, 224, 224, mi))
        self.output = self.forward(self.input_)
Example #5
    def __init__(self, mi, fm_sizes, activation=tf.compat.v1.nn.relu):
        super(IdentityBlock, self).__init__()
        # conv1, conv2, conv3
        # note: # feature maps in the shortcut = # feature maps out of conv3
        assert (len(fm_sizes) == 3)

        # note: kernel size in 2nd conv is always 3
        #       so we won't bother including it as an arg

        self.session = None
        self.f = activation

        # init main branch
        # Conv -> BN -> F() ---> Conv -> BN -> F() ---> Conv -> BN
        self.conv1 = ConvLayer(1, mi, fm_sizes[0], 1)
        self.bn1 = BatchNormLayer(fm_sizes[0])
        self.conv2 = ConvLayer(3, fm_sizes[0], fm_sizes[1], 1, 'SAME')  # 'SAME' padding so FX + X shapes match
        self.bn2 = BatchNormLayer(fm_sizes[1])
        self.conv3 = ConvLayer(1, fm_sizes[1], fm_sizes[2], 1)
        self.bn3 = BatchNormLayer(fm_sizes[2])

        # in case needed later
        self.layers = [
            self.conv1,
            self.bn1,
            self.conv2,
            self.bn2,
            self.conv3,
            self.bn3,
        ]
Example #6
	def __init__(self, c_in, fm_sizes, activation=tf.nn.relu):
		# an IdentityBlock consists of 3 ConvLayers:
		# conv1, conv2, conv3
		assert(len(fm_sizes) == 3)

		self.session = None
		self.activation = activation

		# note: stride is always 1

		# init main branch:
		# Conv -> BN -> activation ---> Conv -> BN -> activation ---> Conv -> BN
		self.conv1 = ConvLayer((1, 1), c_in, fm_sizes[0], stride=1)
		self.bn1 = BatchNormLayer(fm_sizes[0])
		self.conv2 = ConvLayer((3, 3), fm_sizes[0], fm_sizes[1], stride=1, padding='SAME')
		self.bn2 = BatchNormLayer(fm_sizes[1])
		self.conv3 = ConvLayer((1, 1), fm_sizes[1], fm_sizes[2], stride=1)
		self.bn3 = BatchNormLayer(fm_sizes[2])

		# for later use:
		self.layers = [
			self.conv1, self.bn1,
			self.conv2, self.bn2,
			self.conv3, self.bn3,
		]

		# next lines won't be used when using whole ResNet:
		self.input_ = tf.placeholder(tf.float32, shape=(1, 224, 224, c_in))
		self.output = self.forward(self.input_)
	def __init__(self):		
		self.layers = [
			# before ConvBlock:
			ConvLayer((7, 7), c_in=3, c_out=64, stride=2, padding='SAME'),
			BatchNormLayer(64), 
			ReLULayer(),
			MaxPoolLayer(dim=3),
			# ConvBlock:
			ConvBlock(c_in=64, fm_sizes=[64, 64, 256], stride=1),
		]

		self.input_ = tf.placeholder(tf.float32, shape=[None, 224, 224, 3])
		self.output = self.forward(self.input_)
Example #8
    def __init__(self):
        self.layers = [
            # before the first convblock:
            ConvLayer((7, 7), c_in=3, c_out=64, stride=2, padding='SAME'),
            BatchNormLayer(64),
            ReLULayer(),
            MaxPoolLayer(dim=3),

            # convblock:
            ConvBlock(c_in=64, fm_sizes=[64, 64, 256], stride=1),

            # identity block x 2:
            IdentityBlock(c_in=256, fm_sizes=[64, 64, 256]),
            IdentityBlock(c_in=256, fm_sizes=[64, 64, 256]),

            # convblock:
            ConvBlock(c_in=256, fm_sizes=[128, 128, 512], stride=2),

            # identity block x 3:
            IdentityBlock(c_in=512, fm_sizes=[128, 128, 512]),
            IdentityBlock(c_in=512, fm_sizes=[128, 128, 512]),
            IdentityBlock(c_in=512, fm_sizes=[128, 128, 512]),

            # convblock:
            ConvBlock(c_in=512, fm_sizes=[256, 256, 1024], stride=2),

            # identity block x 5:
            IdentityBlock(c_in=1024, fm_sizes=[256, 256, 1024]),
            IdentityBlock(c_in=1024, fm_sizes=[256, 256, 1024]),
            IdentityBlock(c_in=1024, fm_sizes=[256, 256, 1024]),
            IdentityBlock(c_in=1024, fm_sizes=[256, 256, 1024]),
            IdentityBlock(c_in=1024, fm_sizes=[256, 256, 1024]),

            # convblock:
            ConvBlock(c_in=1024, fm_sizes=[512, 512, 2048], stride=2),

            # identity block x 2:
            IdentityBlock(c_in=2048, fm_sizes=[512, 512, 2048]),
            IdentityBlock(c_in=2048, fm_sizes=[512, 512, 2048]),

            # pool / flatten / dense:
            AvgPool(kernel_size=7),
            Flatten(),
            DenseLayer(m_in=2048, m_out=1000)
        ]

        self.input_ = tf.placeholder(tf.float32, shape=(None, 224, 224, 3))
        self.output = self.forward(self.input_)
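
This layer list is the standard ResNet-50 stage layout: 3, 4, 6, and 3 bottleneck blocks, each stage opening with a ConvBlock followed by IdentityBlocks. A minimal smoke test, assuming TF1.x graph mode and that the enclosing class is named `TFResNet` (that name is purely illustrative):

import numpy as np
import tensorflow as tf  # TF1.x graph mode assumed

model = TFResNet()  # hypothetical class name for the __init__ above
X = np.random.random((1, 224, 224, 3)).astype(np.float32)

with tf.Session() as session:
    session.run(tf.global_variables_initializer())
    output = session.run(model.output, feed_dict={model.input_: X})
    print(output.shape)  # expect (1, 1000) from the final DenseLayer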
 def __init__(self):
     self.layers = [
         # before conv block
         ConvLayer(d=7, mi=3, mo=64, stride=2, padding='SAME'),
         BatchNormLayer(64),
         ReLULayer(),
         MaxPoolLayer(dim=3),
         # conv block
         ConvBlock(mi=64, fm_sizes=[64, 64, 256], stride=1),
         # identity block x 2
         IdentityBlock(mi=256, fm_sizes=[64, 64, 256]),
         IdentityBlock(mi=256, fm_sizes=[64, 64, 256]),
         # conv block
         ConvBlock(mi=256, fm_sizes=[128, 128, 512], stride=2),
         # identity block x 3
         IdentityBlock(mi=512, fm_sizes=[128, 128, 512]),
         IdentityBlock(mi=512, fm_sizes=[128, 128, 512]),
         IdentityBlock(mi=512, fm_sizes=[128, 128, 512]),
         # conv block
         ConvBlock(mi=512, fm_sizes=[256, 256, 1024], stride=2),
         # identity block x 5
         IdentityBlock(mi=1024, fm_sizes=[256, 256, 1024]),
         IdentityBlock(mi=1024, fm_sizes=[256, 256, 1024]),
         IdentityBlock(mi=1024, fm_sizes=[256, 256, 1024]),
         IdentityBlock(mi=1024, fm_sizes=[256, 256, 1024]),
         IdentityBlock(mi=1024, fm_sizes=[256, 256, 1024]),
         # conv block
         ConvBlock(mi=1024, fm_sizes=[512, 512, 2048], stride=2),
         # identity block x 2
         IdentityBlock(mi=2048, fm_sizes=[512, 512, 2048]),
         IdentityBlock(mi=2048, fm_sizes=[512, 512, 2048]),
         # pool / flatten / dense
         AvgPool(ksize=7),
         Flatten(),
         DenseLayer(mi=2048, mo=1000)
     ]
     self.input_ = tf.compat.v1.placeholder(tf.float32,
                                            shape=(None, 224, 224, 3))
     self.output = self.forward(self.input_)
    def __init__(self, mi, fm_sizes, activation=tf.nn.relu):
        assert (len(fm_sizes) == 3)
        self.session = None
        self.f = activation
        self.conv1 = ConvLayer(1, mi, fm_sizes[0], 1)
        self.bn1 = BatchNormLayer(fm_sizes[0])
        self.conv2 = ConvLayer(3, fm_sizes[0], fm_sizes[1], 1, 'SAME')
        self.bn2 = BatchNormLayer(fm_sizes[1])
        self.conv3 = ConvLayer(1, fm_sizes[1], fm_sizes[2], 1)
        self.bn3 = BatchNormLayer(fm_sizes[2])
        self.layers = [
            self.conv1,
            self.bn1,
            self.conv2,
            self.bn2,
            self.conv3,
            self.bn3,
        ]

        self.input_ = tf.placeholder(tf.float32, shape=(1, 224, 224, mi))
        self.output = self.forward(self.input_)
Example #11
class IdentityBlock:
    def __init__(self, mi, fm_sizes, activation=tf.nn.relu):
        # conv1, conv2, conv3
        # note: # feature maps in the shortcut = # feature maps out of conv3
        assert (len(fm_sizes) == 3)

        # note: kernel size in 2nd conv is always 3
        #       so we won't bother including it as an arg

        self.session = None
        self.f = activation

        # init main branch
        # Conv -> BN -> F() ---> Conv -> BN -> F() ---> Conv -> BN
        self.conv1 = ConvLayer(1, mi, fm_sizes[0], 1)
        self.bn1 = BatchNormLayer(fm_sizes[0])
        self.conv2 = ConvLayer(3, fm_sizes[0], fm_sizes[1], 1, 'SAME')
        self.bn2 = BatchNormLayer(fm_sizes[1])
        self.conv3 = ConvLayer(1, fm_sizes[1], fm_sizes[2], 1)
        self.bn3 = BatchNormLayer(fm_sizes[2])

        # in case needed later
        self.layers = [
            self.conv1,
            self.bn1,
            self.conv2,
            self.bn2,
            self.conv3,
            self.bn3,
        ]

        # this will not be used when the input is
        # passed in from a previous layer
        self.input_ = tf.placeholder(tf.float32, shape=(1, 224, 224, mi))
        self.output = self.forward(self.input_)

    def forward(self, X):
        # main branch
        FX = self.conv1.forward(X)
        FX = self.bn1.forward(FX)
        FX = self.f(FX)
        FX = self.conv2.forward(FX)
        FX = self.bn2.forward(FX)
        FX = self.f(FX)
        FX = self.conv3.forward(FX)
        FX = self.bn3.forward(FX)

        return self.f(FX + X)

    def predict(self, X):
        assert (self.session is not None)
        return self.session.run(self.output, feed_dict={self.input_: X})

    def set_session(self, session):
        # set the session on every sublayer too,
        # so weight assignments run in the same session
        self.session = session
        self.conv1.session = session
        self.bn1.session = session
        self.conv2.session = session
        self.bn2.session = session
        self.conv3.session = session
        self.bn3.session = session

    def copyFromKerasLayers(self, layers):
        assert (len(layers) == 10)
        # <keras.layers.convolutional.Conv2D at 0x7fa44255ff28>,
        # <keras.layers.normalization.BatchNormalization at 0x7fa44250e7b8>,
        # <keras.layers.core.Activation at 0x7fa44252d9e8>,
        # <keras.layers.convolutional.Conv2D at 0x7fa44253af60>,
        # <keras.layers.normalization.BatchNormalization at 0x7fa4424e4f60>,
        # <keras.layers.core.Activation at 0x7fa442494828>,
        # <keras.layers.convolutional.Conv2D at 0x7fa4424a2da0>,
        # <keras.layers.normalization.BatchNormalization at 0x7fa44244eda0>,
        # <keras.layers.merge.Add at 0x7fa44245d5c0>,
        # <keras.layers.core.Activation at 0x7fa44240aba8>
        # layers[2], [5], [9] (Activation) and layers[8] (Add) have no weights, so they are skipped
        self.conv1.copyFromKerasLayers(layers[0])
        self.bn1.copyFromKerasLayers(layers[1])
        self.conv2.copyFromKerasLayers(layers[3])
        self.bn2.copyFromKerasLayers(layers[4])
        self.conv3.copyFromKerasLayers(layers[6])
        self.bn3.copyFromKerasLayers(layers[7])

    def get_params(self):
        params = []
        for layer in self.layers:
            params += layer.get_params()
        return params
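
A minimal smoke test for this class, assuming TF1.x graph mode and NumPy; with `mi=256` the placeholder built in `__init__` expects input of shape (1, 224, 224, 256):

import numpy as np
import tensorflow as tf  # TF1.x graph mode assumed

identity_block = IdentityBlock(mi=256, fm_sizes=[64, 64, 256])
X = np.random.random((1, 224, 224, 256)).astype(np.float32)

with tf.Session() as session:
    identity_block.set_session(session)
    session.run(tf.global_variables_initializer())
    output = identity_block.predict(X)
    print(output.shape)  # (1, 224, 224, 256): the block preserves shape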
Example #12
class IdentityBlock(keras.layers.Layer):
    def __init__(self, mi, fm_sizes, activation=tf.compat.v1.nn.relu):
        super(IdentityBlock, self).__init__()
        # conv1, conv2, conv3
        # note: # feature maps in the shortcut = # feature maps out of conv3
        assert (len(fm_sizes) == 3)

        # note: kernel size in 2nd conv is always 3
        #       so we won't bother including it as an arg

        self.session = None
        self.f = activation

        # init main branch
        # Conv -> BN -> F() ---> Conv -> BN -> F() ---> Conv -> BN
        self.conv1 = ConvLayer(1, mi, fm_sizes[0], 1)
        self.bn1 = BatchNormLayer(fm_sizes[0])
        self.conv2 = ConvLayer(3, fm_sizes[0], fm_sizes[1], 1, 'SAME')  # 'SAME' padding so FX + X shapes match
        self.bn2 = BatchNormLayer(fm_sizes[1])
        self.conv3 = ConvLayer(1, fm_sizes[1], fm_sizes[2], 1)
        self.bn3 = BatchNormLayer(fm_sizes[2])

        # in case needed later
        self.layers = [
            self.conv1,
            self.bn1,
            self.conv2,
            self.bn2,
            self.conv3,
            self.bn3,
        ]

    def call(self, X):
        FX = self.conv1(X)
        FX = self.bn1(FX)
        FX = self.f(FX)
        FX = self.conv2(FX)
        FX = self.bn2(FX)
        FX = self.f(FX)
        FX = self.conv3(FX)
        FX = self.bn3(FX)
        # sum + activation
        Y = self.f(tf.math.add(FX, X))
        return Y

    def copyFromKerasLayers(self, layers):
        #assert(len(layers) == 10)
        # <keras.layers.convolutional.Conv2D at 0x7fa44255ff28>,
        # <keras.layers.normalization.BatchNormalization at 0x7fa44250e7b8>,
        # <keras.layers.core.Activation at 0x7fa44252d9e8>,
        # <keras.layers.convolutional.Conv2D at 0x7fa44253af60>,
        # <keras.layers.normalization.BatchNormalization at 0x7fa4424e4f60>,
        # <keras.layers.core.Activation at 0x7fa442494828>,
        # <keras.layers.convolutional.Conv2D at 0x7fa4424a2da0>,
        # <keras.layers.normalization.BatchNormalization at 0x7fa44244eda0>,
        # <keras.layers.merge.Add at 0x7fa44245d5c0>,
        # <keras.layers.core.Activation at 0x7fa44240aba8>
        self.conv1.copyFromKerasLayers(layers[0])
        self.bn1.copyFromKerasLayers(layers[1])
        self.conv2.copyFromKerasLayers(layers[3])
        self.bn2.copyFromKerasLayers(layers[4])
        self.conv3.copyFromKerasLayers(layers[6])
        self.bn3.copyFromKerasLayers(layers[7])

    def get_params(self):
        params = []
        for layer in self.layers:
            params += layer.get_params()
        return params
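
Because this variant subclasses `keras.layers.Layer`, it can be exercised eagerly in TF2; a minimal sketch, assuming `ConvLayer` and `BatchNormLayer` are themselves callable Keras layers (as the `call` method above implies):

import tensorflow as tf

block = IdentityBlock(mi=256, fm_sizes=[64, 64, 256])
X = tf.random.normal((1, 224, 224, 256))
Y = block(X)  # invokes IdentityBlock.call
print(Y.shape)  # (1, 224, 224, 256): shape is preserved for the identity shortcut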
Example #13
class IdentityBlock:
	def __init__(self, c_in, fm_sizes, activation=tf.nn.relu):
		# an IdentityBlock consists of 3 ConvLayers:
		# conv1, conv2, conv3
		assert(len(fm_sizes) == 3)

		self.session = None
		self.activation = activation

		# note: stride is always 1

		# init main branch:
		# Conv -> BN -> activation ---> Conv -> BN -> activation ---> Conv -> BN
		self.conv1 = ConvLayer((1, 1), c_in, fm_sizes[0], stride=1)
		self.bn1 = BatchNormLayer(fm_sizes[0])
		self.conv2 = ConvLayer((3, 3), fm_sizes[0], fm_sizes[1], stride=1, padding='SAME')
		self.bn2 = BatchNormLayer(fm_sizes[1])
		self.conv3 = ConvLayer((1, 1), fm_sizes[1], fm_sizes[2], stride=1)
		self.bn3 = BatchNormLayer(fm_sizes[2])

		# for later use:
		self.layers = [
			self.conv1, self.bn1,
			self.conv2, self.bn2,
			self.conv3, self.bn3,
		]

		# next lines won't be used when using whole ResNet:
		self.input_ = tf.placeholder(tf.float32, shape=(1, 224, 224, c_in))
		self.output = self.forward(self.input_)


	def forward(self, X):
		# main branch:
		fX = self.conv1.forward(X)
		fX = self.bn1.forward(fX)
		fX = self.activation(fX)
		fX = self.conv2.forward(fX)
		fX = self.bn2.forward(fX)
		fX = self.activation(fX)
		fX = self.conv3.forward(fX)
		fX = self.bn3.forward(fX)
		
		# shortcut is just input data X
		return self.activation(fX + X)


	def predict(self, X):
		# we need to run the prediction in a session:
		assert(self.session is not None)
		return self.session.run(
			self.output,
			feed_dict={self.input_: X}
		)


	def set_session(self, session):
		# we need to set a session on every layer/sub-layer:
		self.session = session
		self.conv1.session = session
		self.bn1.session = session 
		self.conv2.session = session 
		self.bn2.session = session 
		self.conv3.session = session 
		self.bn3.session = session 


	def copyFromKerasLayers(self, layers):
		assert(len(layers) == 10)
		# <keras.layers.convolutional.Conv2D at 0x7fa44255ff28>,
		# <keras.layers.normalization.BatchNormalization at 0x7fa44250e7b8>,
		# <keras.layers.core.Activation at 0x7fa44252d9e8>,
		# <keras.layers.convolutional.Conv2D at 0x7fa44253af60>,
		# <keras.layers.normalization.BatchNormalization at 0x7fa4424e4f60>,
		# <keras.layers.core.Activation at 0x7fa442494828>,
		# <keras.layers.convolutional.Conv2D at 0x7fa4424a2da0>,
		# <keras.layers.normalization.BatchNormalization at 0x7fa44244eda0>,
		# <keras.layers.merge.Add at 0x7fa44245d5c0>,
		# <keras.layers.core.Activation at 0x7fa44240aba8>
		self.conv1.copyFromKerasLayers(layers[0])
		self.bn1.copyFromKerasLayers(layers[1])
		self.conv2.copyFromKerasLayers(layers[3])
		self.bn2.copyFromKerasLayers(layers[4])
		self.conv3.copyFromKerasLayers(layers[6])
		self.bn3.copyFromKerasLayers(layers[7])


	def get_params(self):
		params = []
		for layer in self.layers:
			params += layer.get_params()
		return params
class IdentityBlock:
    def __init__(self, mi, fm_sizes, activation=tf.nn.relu):
        assert (len(fm_sizes) == 3)
        self.session = None
        self.f = activation
        self.conv1 = ConvLayer(1, mi, fm_sizes[0], 1)
        self.bn1 = BatchNormLayer(fm_sizes[0])
        self.conv2 = ConvLayer(3, fm_sizes[0], fm_sizes[1], 1, 'SAME')
        self.bn2 = BatchNormLayer(fm_sizes[1])
        self.conv3 = ConvLayer(1, fm_sizes[1], fm_sizes[2], 1)
        self.bn3 = BatchNormLayer(fm_sizes[2])
        self.layers = [
            self.conv1,
            self.bn1,
            self.conv2,
            self.bn2,
            self.conv3,
            self.bn3,
        ]

        self.input_ = tf.placeholder(tf.float32, shape=(1, 224, 224, mi))
        self.output = self.forward(self.input_)

    def forward(self, X):
        # main branch
        FX = self.conv1.forward(X)
        FX = self.bn1.forward(FX)
        FX = self.f(FX)
        FX = self.conv2.forward(FX)
        FX = self.bn2.forward(FX)
        FX = self.f(FX)
        FX = self.conv3.forward(FX)
        FX = self.bn3.forward(FX)

        return self.f(FX + X)

    def predict(self, X):
        assert (self.session is not None)
        return self.session.run(self.output, feed_dict={self.input_: X})

    def set_session(self, session):
        self.session = session
        self.conv1.session = session
        self.bn1.session = session
        self.conv2.session = session
        self.bn2.session = session
        self.conv3.session = session
        self.bn3.session = session

    def copyFromKerasLayers(self, layers):
        assert (len(layers) == 10)
        self.conv1.copyFromKerasLayers(layers[0])
        self.bn1.copyFromKerasLayers(layers[1])
        self.conv2.copyFromKerasLayers(layers[3])
        self.bn2.copyFromKerasLayers(layers[4])
        self.conv3.copyFromKerasLayers(layers[6])
        self.bn3.copyFromKerasLayers(layers[7])

    def get_params(self):
        params = []
        for layer in self.layers:
            params += layer.get_params()
        return params
class IdentityBlock:
  def __init__(self, mi, fm_sizes, activation=tf.nn.relu):
    # conv1, conv2, conv3
    # note: # feature maps in the shortcut = # feature maps out of conv3
    assert(len(fm_sizes) == 3)

    # note: kernel size in 2nd conv is always 3
    #       so we won't bother including it as an arg

    self.session = None
    self.f = activation
    
    # init main branch
    # Conv -> BN -> F() ---> Conv -> BN -> F() ---> Conv -> BN
    self.conv1 = ConvLayer(1, mi, fm_sizes[0], 1)
    self.bn1   = BatchNormLayer(fm_sizes[0])
    self.conv2 = ConvLayer(3, fm_sizes[0], fm_sizes[1], 1, 'SAME')
    self.bn2   = BatchNormLayer(fm_sizes[1])
    self.conv3 = ConvLayer(1, fm_sizes[1], fm_sizes[2], 1)
    self.bn3   = BatchNormLayer(fm_sizes[2])

    # in case needed later
    self.layers = [
      self.conv1, self.bn1,
      self.conv2, self.bn2,
      self.conv3, self.bn3,
    ]

    # this will not be used when the input is
    # passed in from a previous layer
    self.input_ = tf.placeholder(tf.float32, shape=(1, 224, 224, mi))
    self.output = self.forward(self.input_)

  def forward(self, X):
    # main branch
    FX = self.conv1.forward(X)
    FX = self.bn1.forward(FX)
    FX = self.f(FX)
    FX = self.conv2.forward(FX)
    FX = self.bn2.forward(FX)
    FX = self.f(FX)
    FX = self.conv3.forward(FX)
    FX = self.bn3.forward(FX)

    return self.f(FX + X)

  def predict(self, X):
    assert(self.session is not None)
    return self.session.run(
      self.output,
      feed_dict={self.input_: X}
    )

  def set_session(self, session):
    # set the session on every sublayer too,
    # so weight assignments run in the same session
    self.session = session
    self.conv1.session = session
    self.bn1.session = session
    self.conv2.session = session
    self.bn2.session = session
    self.conv3.session = session
    self.bn3.session = session

  def copyFromKerasLayers(self, layers):
    assert(len(layers) == 10)
    # <keras.layers.convolutional.Conv2D at 0x7fa44255ff28>,
    # <keras.layers.normalization.BatchNormalization at 0x7fa44250e7b8>,
    # <keras.layers.core.Activation at 0x7fa44252d9e8>,
    # <keras.layers.convolutional.Conv2D at 0x7fa44253af60>,
    # <keras.layers.normalization.BatchNormalization at 0x7fa4424e4f60>,
    # <keras.layers.core.Activation at 0x7fa442494828>,
    # <keras.layers.convolutional.Conv2D at 0x7fa4424a2da0>,
    # <keras.layers.normalization.BatchNormalization at 0x7fa44244eda0>,
    # <keras.layers.merge.Add at 0x7fa44245d5c0>,
    # <keras.layers.core.Activation at 0x7fa44240aba8>
    self.conv1.copyFromKerasLayers(layers[0])
    self.bn1.copyFromKerasLayers(layers[1])
    self.conv2.copyFromKerasLayers(layers[3])
    self.bn2.copyFromKerasLayers(layers[4])
    self.conv3.copyFromKerasLayers(layers[6])
    self.bn3.copyFromKerasLayers(layers[7])

  def get_params(self):
    params = []
    for layer in self.layers:
      params += layer.get_params()
    return params
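
Across all of these variants, `get_params` collects every trainable variable in the block. One common use is building a `tf.train.Saver` restricted to just this block's weights (a sketch, assuming each sublayer's `get_params` returns a list of `tf.Variable`s):

identity_block = IdentityBlock(mi=256, fm_sizes=[64, 64, 256])
# save/restore only this block's variables
saver = tf.train.Saver(identity_block.get_params())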