Example #1
    def build(self, inputs):
        """Use config dictionary to generate all important attributes for head.

    Args:
       inputs: dictionary of the shape of input args as a dictionary of lists.
    """

        keys = [int(key) for key in inputs.keys()]
        self._min_level = min(keys)
        self._max_level = max(keys)
        self._min_depth = inputs[str(self._min_level)][-1]
        self._depths = self.get_raw_depths(self._min_depth, inputs)

        # directly connect to an input path and process it
        self.preprocessors = dict()
        # resample an input and merge it with the output of another path
        # in order to aggregate backbone outputs
        self.resamples = dict()
        # set of convolution layers and upsample layers that are used to
        # prepare the FPN processors for output

        for level, depth in zip(
                reversed(range(self._min_level, self._max_level + 1)),
                self._depths):

            if level == self._min_level:
                self.resamples[str(level)] = nn_blocks.PathAggregationBlock(
                    filters=depth // 2,
                    inverted=True,
                    upsample=True,
                    drop_final=self._csp_stack == 0,
                    upsample_size=2,
                    **self._base_config)
                self.preprocessors[str(level)] = _IdentityRoute()
            elif level != self._max_level:
                self.resamples[str(level)] = nn_blocks.PathAggregationBlock(
                    filters=depth // 2,
                    inverted=True,
                    upsample=True,
                    drop_final=False,
                    upsample_size=2,
                    **self._base_config)
                self.preprocessors[str(level)] = nn_blocks.DarkRouteProcess(
                    filters=depth,
                    repetitions=self._fpn_depth -
                    int(level == self._min_level),
                    block_invert=True,
                    insert_spp=False,
                    csp_stack=self._csp_stack,
                    **self._base_config)
            else:
                self.preprocessors[str(level)] = nn_blocks.DarkRouteProcess(
                    filters=depth,
                    repetitions=self._max_fpn_depth +
                    1 * int(self._csp_stack == 0),
                    insert_spp=True,
                    block_invert=False,
                    csp_stack=min(self._csp_stack, self._max_fpn_depth),
                    **self._base_config)
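
The build method above only consumes a mapping from level keys to input shapes. Below is a minimal sketch, not taken from the source, of the level/depth bookkeeping it performs; the backbone levels and channel depths are illustrative assumptions.

    # Illustrative shapes for a hypothetical three-level backbone (assumed values).
    example_inputs = {
        '3': [None, 52, 52, 256],
        '4': [None, 26, 26, 512],
        '5': [None, 13, 13, 1024],
    }

    keys = [int(key) for key in example_inputs.keys()]
    min_level, max_level = min(keys), max(keys)      # 3 and 5
    min_depth = example_inputs[str(min_level)][-1]   # 256

    # Mirrors the reversed iteration above: the deepest level is processed first.
    levels = list(reversed(range(min_level, max_level + 1)))
    print(levels)  # [5, 4, 3]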
Example #2
    def test_gradient_pass_though(self, width, height, filters, repetitions,
                                  spp):
        loss = tf.keras.losses.MeanSquaredError()
        optimizer = tf.keras.optimizers.SGD()
        test_layer = nn_blocks.DarkRouteProcess(filters=filters,
                                                repetitions=repetitions,
                                                insert_spp=spp)

        if repetitions == 1:
            filter_y1 = filters
        else:
            filter_y1 = filters // 2

        init = tf.random_normal_initializer()
        x = tf.Variable(initial_value=init(shape=(1, width, height, filters),
                                           dtype=tf.float32))
        y_0 = tf.Variable(initial_value=init(shape=(1, width, height, filters),
                                             dtype=tf.float32))
        y_1 = tf.Variable(initial_value=init(
            shape=(1, width, height, filter_y1), dtype=tf.float32))

        with tf.GradientTape() as tape:
            x_hat_0, x_hat_1 = test_layer(x)
            grad_loss_0 = loss(x_hat_0, y_0)
            grad_loss_1 = loss(x_hat_1, y_1)
        grad = tape.gradient([grad_loss_0, grad_loss_1],
                             test_layer.trainable_variables)
        optimizer.apply_gradients(zip(grad, test_layer.trainable_variables))

        self.assertNotIn(None, grad)
        return
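
For reference, the same gradient-pass-through pattern can be exercised without the nn_blocks package; the sketch below swaps in a stock Conv2D layer, and the shapes are illustrative assumptions rather than values from the original test.

    import tensorflow as tf

    layer = tf.keras.layers.Conv2D(filters=8, kernel_size=3, padding='same')
    loss_fn = tf.keras.losses.MeanSquaredError()
    optimizer = tf.keras.optimizers.SGD()

    init = tf.random_normal_initializer()
    x = tf.Variable(initial_value=init(shape=(1, 32, 32, 8), dtype=tf.float32))
    y = tf.Variable(initial_value=init(shape=(1, 32, 32, 8), dtype=tf.float32))

    with tf.GradientTape() as tape:
        y_hat = layer(x)
        loss = loss_fn(y_hat, y)

    # A None entry here would mean some trainable variable is disconnected
    # from the loss, which is exactly what the test above guards against.
    grads = tape.gradient(loss, layer.trainable_variables)
    optimizer.apply_gradients(zip(grads, layer.trainable_variables))
    assert None not in grads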
Example #3
  def test_pass_through(self, width, height, filters, repetitions, spp):
    x = tf.keras.Input(shape=(width, height, filters))
    test_layer = nn_blocks.DarkRouteProcess(
        filters=filters, repetitions=repetitions, insert_spp=spp)
    outx = test_layer(x)
    self.assertLen(outx, 2, msg='len(outx) != 2')
    if repetitions == 1:
      filter_y1 = filters
    else:
      filter_y1 = filters // 2
    self.assertAllEqual(
        outx[1].shape.as_list(), [None, width, height, filter_y1])
    self.assertAllEqual(
        filters % 2,
        0,
        msg='Output of a DarkRouteProcess layer has an odd number of filters')
    self.assertAllEqual(outx[0].shape.as_list(), [None, width, height, filters])
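
The shape assertions rely on Keras symbolic tensors carrying a static shape with a None batch dimension. A minimal sketch of the same check on a stock layer (the sizes and the Conv2D stand-in are assumptions, not from the original test):

    import tensorflow as tf

    x = tf.keras.Input(shape=(32, 32, 16))
    out = tf.keras.layers.Conv2D(filters=16, kernel_size=3, padding='same')(x)

    # Symbolic tensors report their static shape; the batch dimension is None.
    assert out.shape.as_list() == [None, 32, 32, 16]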
Example #4
    def build(self, inputs):
        """Use config dictionary to generate all important attributes for head.

    Args:
      inputs: dictionary of the shape of input args as a dictionary of lists.
    """

        # define the key order
        keys = [int(key) for key in inputs.keys()]
        self._min_level = min(keys)
        self._max_level = max(keys)
        self._min_depth = inputs[str(self._min_level)][-1]
        self._depths = self.get_raw_depths(self._min_depth, inputs)

        # directly connect to an input path and process it
        self.preprocessors = dict()
        # resample an input and merge it with the output of another path
        # in order to aggregate backbone outputs
        self.resamples = dict()

        # The FPN reverses the key processing order relative to the backbone,
        # so we need to adjust the order in which objects are created and
        # processed to account for this. Without an FPN the decoder connects
        # directly to the backbone, so objects must be created from the
        # largest level down to the smallest.
        if self._fpn_input:
            # process order {... 3, 4, 5}
            self._iterator = range(self._min_level, self._max_level + 1)
            self._check = lambda x: x < self._max_level
            self._key_shift = lambda x: x + 1
            self._input = self._min_level
            downsample = True
            upsample = False
        else:
            # process order {5, 4, 3, ...}
            self._iterator = list(
                reversed(range(self._min_level, self._max_level + 1)))
            self._check = lambda x: x > self._min_level
            self._key_shift = lambda x: x - 1
            self._input = self._max_level
            downsample = False
            upsample = True

        if self._csp_stack == 0:
            proc_filters = lambda x: x
            resample_filters = lambda x: x // 2
        else:
            proc_filters = lambda x: x * 2
            resample_filters = lambda x: x
        for level, depth in zip(self._iterator, self._depths):
            if level == self._input:
                self.preprocessors[str(level)] = nn_blocks.DarkRouteProcess(
                    filters=proc_filters(depth),
                    repetitions=self._max_level_process_len,
                    insert_spp=self._embed_spp,
                    block_invert=False,
                    insert_sam=self._use_spatial_attention,
                    csp_stack=self._csp_stack,
                    **self._base_config)
            else:
                self.resamples[str(level)] = nn_blocks.PathAggregationBlock(
                    filters=resample_filters(depth),
                    upsample=upsample,
                    downsample=downsample,
                    inverted=False,
                    drop_final=self._csp_stack == 0,
                    **self._base_config)
                self.preprocessors[str(level)] = nn_blocks.DarkRouteProcess(
                    filters=proc_filters(depth),
                    repetitions=self._path_process_len,
                    insert_spp=False,
                    insert_sam=self._use_spatial_attention,
                    csp_stack=self._csp_stack,
                    **self._base_config)
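
The two if blocks above only pick an iteration order and a pair of filter functions. A minimal standalone sketch of that selection logic, with stand-in values for fpn_input, csp_stack and the level range (all assumptions, not read from a real config):

    min_level, max_level = 3, 5        # assumed level range
    fpn_input, csp_stack = False, 0    # assumed config values

    if fpn_input:
        # FPN input: process shallow to deep, e.g. [3, 4, 5], downsampling
        # between levels.
        iterator = list(range(min_level, max_level + 1))
        downsample, upsample = True, False
    else:
        # Directly on the backbone: process deep to shallow, e.g. [5, 4, 3],
        # upsampling between levels.
        iterator = list(reversed(range(min_level, max_level + 1)))
        downsample, upsample = False, True

    if csp_stack == 0:
        proc_filters = lambda depth: depth
        resample_filters = lambda depth: depth // 2
    else:
        proc_filters = lambda depth: depth * 2
        resample_filters = lambda depth: depth

    print(iterator, upsample, proc_filters(256), resample_filters(256))
    # [5, 4, 3] True 256 128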