Example #1
    def build(self,
              loss='cross_entropy',
              lr_list=None,
              optimizer=None,
              metric=None,
              metric_name='Metric'):
        if self._identity_initial:
            self._initial_define()
        Feedforward.build(self)
        # Check branch shapes
        output_shape = self.outputs.get_shape().as_list()
        for b_out in self.branch_outputs:
            assert isinstance(b_out, tf.Tensor)
            if b_out.get_shape().as_list() != output_shape:
                raise ValueError(
                    '!! Branch outputs in bamboo should have the same'
                    ' shape as the trunk output')
        # Create the targets placeholder and add it to the feed dict collection
        self._targets = tf.placeholder(self.outputs.dtype,
                                       output_shape,
                                       name='targets')
        tf.add_to_collection(pedia.default_feed_dict, self._targets)

        # Generate output list
        output_list = self.branch_outputs + [self.outputs]

        # Define losses
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            # Append one loss per output (branch outputs first, then the trunk)
            for output in output_list:
                self._losses.append(loss_function(self._targets, output))

        # Define metrics
        metric_function = metrics.get(metric)
        if metric_function is not None:
            pedia.memo[pedia.metric_name] = metric_name
            with tf.name_scope('Metric'):
                for output in output_list:
                    self._metrics.append(metric_function(
                        self._targets, output))

        # Define train step
        self._define_train_step(optimizer)

        # Sanity check
        assert (len(self._losses) == len(self._metrics)
                == len(self.branch_outputs) + 1)

        # Print status and model structure
        self.show_building_info(FeedforwardNet=self)

        # Launch session
        self.launch_model(FLAGS.overwrite and FLAGS.train)

        # Set built flag
        self._output_list = output_list
        self._built = True
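
A standalone TF 1.x sketch of the pattern above (layer sizes and names are illustrative, not from the source): because every branch output is checked against the trunk output's shape, a single targets placeholder can be paired with each entry of output_list to produce one loss per exit point.

import tensorflow as tf  # TF 1.x API, as in the examples on this page

x = tf.placeholder(tf.float32, [None, 4], name='input')
targets = tf.placeholder(tf.float32, [None, 10], name='targets')

# Branch outputs plus the trunk output, all sharing one shape
branch_1 = tf.layers.dense(x, 10, name='branch_1')
branch_2 = tf.layers.dense(x, 10, name='branch_2')
trunk = tf.layers.dense(x, 10, name='trunk')
output_list = [branch_1, branch_2, trunk]

# One loss per output, mirroring the loop inside the Loss name scope
with tf.name_scope('Loss'):
    loss_list = [tf.losses.softmax_cross_entropy(targets, logits)
                 for logits in output_list]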
Example #2
    def _build(self,
               optimizer=None,
               loss='euclid',
               metric=None,
               metric_is_like_loss=True,
               metric_name='Metric',
               **kwargs):
        # Call the parent's build method; by this point the output tensor
        # has usually been plugged into the Model._outputs slot
        self.master._build(self)
        assert self.outputs.activated

        # Create the targets placeholder and add it to the feed dict collection
        self._plug_target_in(self.outputs.shape_list)

        # Define loss
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            if loss == 'cross_entropy':
                output_tensor = self.logits_tensor
                assert output_tensor is not None
            else:
                output_tensor = self.outputs.tensor
            loss_tensor = loss_function(self._targets.tensor, output_tensor)
            # TODO: with or without regularization loss?
            if hub.summary:
                tf.add_to_collection(
                    pedia.train_step_summaries,
                    tf.summary.scalar('loss_sum', loss_tensor))
            # Try to add regularization loss
            reg_loss = self.regularization_loss
            if reg_loss is not None: loss_tensor += reg_loss
            # Plug in
            self.loss.plug(loss_tensor)

        # Define metric
        if metric is not None:
            metric_function = metrics.get(metric)
            with tf.name_scope('Metric'):
                metric_tensor = metric_function(self._targets.tensor,
                                                self._outputs.tensor)
                self._metric.plug(metric_tensor,
                                  as_loss=metric_is_like_loss,
                                  symbol=metric_name)
                if hub.summary:
                    tf.add_to_collection(
                        pedia.validation_summaries,
                        tf.summary.scalar('metric_sum', self._metric.tensor))

        # Merge summaries
        self._merge_summaries()

        # Define train step
        self._define_train_step(optimizer)
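
Note the special case for loss == 'cross_entropy' above: the loss consumes the raw logits rather than the activated outputs. A minimal TF 1.x illustration of why (shapes are illustrative): softmax cross-entropy applies the softmax internally for numerical stability, so feeding it an already activated tensor would apply the nonlinearity twice.

import tensorflow as tf  # TF 1.x API

targets = tf.placeholder(tf.float32, [None, 10], name='targets')
logits = tf.placeholder(tf.float32, [None, 10], name='logits')

# Correct: the op applies softmax to the logits internally
loss = tf.losses.softmax_cross_entropy(targets, logits)

# Wrong: the softmax ends up applied twice
probs = tf.nn.softmax(logits)
bad_loss = tf.losses.softmax_cross_entropy(targets, probs)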
Example #3
    def build(self,
              loss='cross_entropy',
              optimizer=None,
              metric=None,
              metric_name='Metric'):
        if self._identity_initial:
            self._identity_define()

        self.outputs = self()

        # Create the targets placeholder and add it to the feed dict collection
        self._targets = tf.placeholder(
            name='targets',
            shape=self.branch_outputs[0].get_shape(),
            dtype=self.branch_outputs[0].dtype)
        tf.add_to_collection(pedia.default_feed_dict, self._targets)

        output_list = []
        for i in range(len(self.branch_outputs)):
            output_list.append(tf.add_n(self.branch_outputs[:(i + 1)]))

        # Define loss
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            for output in output_list:
                self._losses.append(loss_function(self._targets, output))

        # Define metrics
        metric_function = metrics.get(metric)
        if metric_function is not None:
            pedia.memo[pedia.metric_name] = metric_name
            with tf.name_scope('Metric'):
                for output in output_list:
                    self._metrics.append(metric_function(
                        self._targets, output))

        # Define train step
        self._define_train_step(optimizer)

        # Sanity check
        assert (len(self._losses) == len(self._metrics)
                == len(self.branch_outputs))

        # Print status and model structure
        self.show_building_info(Feedforward=self)

        # Launch session
        self.launch_model(FLAGS.overwrite and FLAGS.train)

        # Set built flag
        self._output_list = output_list
        self._built = True
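
Unlike Example #1, the output list here is cumulative: the i-th head predicts with the sum of the first i + 1 branch outputs, so each new branch only needs to model the residual left by the earlier ones. A toy TF 1.x sketch of that construction (sizes and names are illustrative):

import tensorflow as tf  # TF 1.x API

x = tf.placeholder(tf.float32, [None, 4], name='input')
branch_outputs = [tf.layers.dense(x, 10, name='branch_%d' % i)
                  for i in range(3)]

# output_list[i] == branch_0 + ... + branch_i, as in the loop above
output_list = [tf.add_n(branch_outputs[:i + 1])
               for i in range(len(branch_outputs))]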
Example #4
    def _build(self,
               loss='cross_entropy',
               optimizer=None,
               metric=None,
               metric_is_like_loss=True,
               metric_name='Metric'):
        Feedforward._build(self)
        # Check branch shapes
        output_shape = self.outputs.shape_list
        for b_out in self.branch_outputs:
            assert isinstance(b_out, tf.Tensor)
            if b_out.get_shape().as_list() != output_shape:
                raise ValueError(
                    '!! Branch outputs in bamboo should have the same'
                    ' shape as the trunk output')
        # Create the targets placeholder and add it to the feed dict collection
        self._plug_target_in(output_shape)

        # Generate output list
        self._output_list = self.branch_outputs + [self.outputs.tensor]

        # Define losses
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            # Append one loss per output (branch outputs first, then the trunk)
            for output in self._output_list:
                assert isinstance(output, tf.Tensor)
                self._losses.append(loss_function(self._targets.tensor,
                                                  output))

        # Define metrics
        metric_function = metrics.get(metric)
        if metric_function is not None:
            with tf.name_scope('Metric'):
                for output in self._output_list:
                    self._metrics.append(
                        metric_function(self._targets.tensor, output))
                self.key_metric.plug(self._metrics[-1],
                                     as_loss=metric_is_like_loss,
                                     symbol=metric_name)

        # Define train step
        self._define_train_step(optimizer)

        # Set default branch
        self.set_branch_index(-1)

        # Sanity check
        assert (len(self._losses) == len(self._metrics)
                == len(self.branch_outputs) + 1)
Example #5
  def build(self, loss='cross_entropy', optimizer=None,
            metric=None, metric_name='Metric'):
    Feedforward.build(self)
    # Summary placeholders
    default_summaries = []
    print_summaries = []
    # Create the targets placeholder and add it to the feed dict collection
    self._targets = tf.placeholder(self.outputs.dtype, self.outputs.get_shape(),
                                   name='targets')
    tf.add_to_collection(pedia.default_feed_dict, self._targets)

    # Define loss
    loss_function = losses.get(loss)
    with tf.name_scope('Loss'):
      self._loss = loss_function(self._targets, self.outputs)
      # TODO: with or without regularization loss?
      default_summaries.append(tf.summary.scalar('loss_sum', self._loss))
      # Try to add regularization loss
      reg_loss = self.regularization_loss
      self._loss = self._loss if reg_loss is None else self._loss + reg_loss

    # Define metric
    metric_function = metrics.get(metric)
    if metric_function is not None:
      pedia.memo[pedia.metric_name] = metric_name
      with tf.name_scope('Metric'):
        self._metric = metric_function(self._targets, self.outputs)
        print_summaries.append(tf.summary.scalar('metric_sum', self._metric))

    # Merge summaries
    self._merged_summary = tf.summary.merge(default_summaries)
    if len(print_summaries) > 0:
      self._print_summary = tf.summary.merge(print_summaries)

    # Define train step
    self._define_train_step(optimizer)

    # Print status and model structure
    self.show_building_info(FeedforwardNet=self)

    # Launch session
    self.launch_model(FLAGS.overwrite and FLAGS.train)

    # Set built flag
    self._built = True
Example #6
    def initialize(self, metric_list, last_only, target_tensor, output_tensor,
                   **kwargs):
        # Sanity check
        if not isinstance(metric_list, (tuple, list)):
            metric_list = [metric_list]
        # if isinstance(metric_list, str): metric_list = [metric_list] TODO: X
        checker.check_type([target_tensor, output_tensor], tf.Tensor)

        for metric in metric_list:
            assert isinstance(metric, (str, Quantity))
            if isinstance(metric, str):
                metric = metric.lower()

            # Get the quantity
            if metric == "loss":
                quantity = self.model.loss_quantity
                tensor = quantity.quantity
            else:
                # Instantiate a new Quantity
                quantity = metrics.get(metric, last_only=last_only, **kwargs)
                tensor = quantity(target_tensor, output_tensor)

            # Create a metric_slot and plug tensor in
            name = metric if isinstance(metric, str) else quantity.name
            metric_slot = MetricSlot(self.model, name=name)
            metric_slot.plug(tensor, quantity.name, quantity_def=quantity)

            # Append metric slot to metrics list
            self.metrics.append(metric_slot)

            # Add metric slot to validate_group
            self.model.validate_group.add(metric_slot)

            # TODO to be deprecated
            if hub.summary:
                from tframe import pedia

                tf.add_to_collection(
                    pedia.validation_summaries,
                    tf.summary.scalar(quantity.name + "_sum", tensor),
                )
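
A pure-Python sketch of the normalization at the top of initialize, runnable without tframe (the helper name is illustrative): a bare metric is wrapped into a list, and string entries are lower-cased before lookup, while Quantity instances pass through untouched.

def normalize_metric_list(metric_list):
    # Wrap a single metric into a list, as the sanity check above does
    if not isinstance(metric_list, (tuple, list)):
        metric_list = [metric_list]
    # Lower-case string entries; leave Quantity instances as they are
    return [m.lower() if isinstance(m, str) else m for m in metric_list]

assert normalize_metric_list('Accuracy') == ['accuracy']
assert normalize_metric_list(['LOSS', 'mae']) == ['loss', 'mae']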
Example #7
    def _build(self,
               loss='euclid',
               optimizer=None,
               homo_strength=1.0,
               metric=None,
               metric_name='Metric'):
        """Build model"""
        # Set summary placeholders
        default_summaries = []
        print_summaries = []
        # Define output
        for order, op in self.T.items():
            self._outputs[order] = op()
        with tf.name_scope('Outputs'):
            self._output = tf.add_n(list(self._outputs.values()),
                                    name='output')

        self._target = tf.placeholder(self._output.dtype,
                                      self._output.get_shape(),
                                      name='target')
        tf.add_to_collection(pedia.default_feed_dict, self._target)

        # Define loss
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            # All losses in loss list will be added
            loss_list = []

            # Delta loss
            with tf.name_scope('Delta'):
                delta_loss = loss_function(self._target, self._output)
                loss_list.append(delta_loss)
                default_summaries.append(
                    tf.summary.scalar('delta_loss_sum', delta_loss))

            # Homogeneous loss
            with tf.name_scope('Homogeneous'):
                homo_list = []
                # Calculate homogeneous loss for each order
                for order, op in self.T.items():
                    if order in range(1, self._max_volterra_order + 1):
                        continue
                    coef = self._alpha**order
                    truth_k = self._outputs[order] * coef
                    pred_k = op(self._input.place_holder * self._alpha)

                    # Calculate loss
                    homo_loss_k = tf.norm(truth_k - pred_k,
                                          name='homo_loss_{}'.format(order))
                    # homo_list.append(numerator / coef)
                    homo_list.append(homo_loss_k)

                    # Add summary
                    default_summaries.append(
                        tf.summary.scalar('homo_loss_{}_sum'.format(order),
                                          homo_loss_k))

                # Add all homogeneous losses
                if len(homo_list) > 0:
                    homo_loss = tf.add_n(homo_list,
                                         'homo_loss') * homo_strength
                    loss_list.append(homo_loss)

            # Try to add regularization loss
            reg_list = [
                op.regularization_loss for op in self.T.values()
                if op.regularization_loss is not None
            ]
            if len(reg_list) > 0:
                with tf.name_scope('WeightNorm'):
                    weight_norm = tf.add_n(reg_list, name='reg_loss')
                    loss_list.append(weight_norm)
                    # tf.summary.scalar('reg_loss_sum', weight_norm)

            # Add all losses
            self._loss = tf.add_n(loss_list, name='loss')
            # tf.summary.scalar('total_loss', self._loss)

        # Define metric
        metric_function = metrics.get(metric)
        if metric_function is not None:
            pedia.memo[pedia.metric_name] = metric_name
            with tf.name_scope('Metric'):
                self._metric = metric_function(self._target, self._output)
                print_summaries.append(
                    tf.summary.scalar('metric_sum', self._metric))

        # Merge summaries
        self._merged_summary = tf.summary.merge(default_summaries,
                                                name='default_summaries')
        if len(print_summaries) > 0:
            self._print_summary = tf.summary.merge(print_summaries)

        # Define train step
        self._define_train_step(optimizer)

        # Print status and model structure
        self._show_building_info(
            **{'T[{}]'.format(key): val
               for key, val in self.operators.items()})

        # Launch session
        self.launch_model(FLAGS.overwrite and FLAGS.train)

        # Set built flag
        self._built = True
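
The homogeneous loss above encodes the defining property of an order-k homogeneous operator, T_k(αx) = α^k · T_k(x): for each order it penalizes the norm of α^k · T_k(x) − T_k(αx). A NumPy sketch of that penalty on a toy operator (all names are illustrative):

import numpy as np

def homo_penalty(op, x, order, alpha=0.5):
    # ||alpha**order * op(x) - op(alpha * x)||; zero when op is
    # homogeneous of the given order
    return np.linalg.norm(alpha ** order * op(x) - op(alpha * x))

cube = lambda v: v ** 3                 # homogeneous of order 3
x = np.ones(4)
print(homo_penalty(cube, x, order=3))   # ~0.0: the property holds
print(homo_penalty(cube, x, order=2))   # > 0: a wrong order is penalized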
Example #8
    def _build(self,
               loss='cross_entropy',
               optimizer=None,
               metric=None,
               metric_is_like_loss=True,
               metric_name='Metric'):
        Feedforward._build(self)
        # Check shapes of branch outputs
        output_shape = self._check_branch_outputs()
        # Create the targets placeholder
        self._plug_target_in(output_shape)

        # Define output tensors
        for i, output in enumerate(self.branch_outputs):
            if i == 0 or not self.strict_residual:
                output_tensor = output
            else:
                output_tensor = output + self._boutputs[i - 1].tensor
            slot = TensorSlot(self, name='output_{}'.format(i + 1))
            slot.plug(output_tensor)
            self._boutputs.append(slot)

        # Define loss tensors
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            for i, output in enumerate(self._boutputs):
                assert isinstance(output, TensorSlot)
                loss_tensor = loss_function(self._targets.tensor,
                                            output.tensor)
                slot = TensorSlot(self, name='loss_{}'.format(i + 1))
                slot.plug(loss_tensor)
                self._losses.append(slot)
                # Add summary
                if hub.summary:
                    name = 'loss_sum_{}'.format(i + 1)
                    sum_slot = SummarySlot(self, name)
                    sum_slot.plug(tf.summary.scalar(name, loss_tensor))
                    self._train_step_summaries.append(sum_slot)

        # Define metric tensors
        metric_function = metrics.get(metric)
        if metric_function is not None:
            with tf.name_scope('Metric'):
                for i, output in enumerate(self._boutputs):
                    assert isinstance(output, TensorSlot)
                    metric_tensor = metric_function(self._targets.tensor,
                                                    output.tensor)
                    slot = Metric(self, name='metric_{}'.format(i + 1))
                    slot.plug(metric_tensor,
                              as_loss=metric_is_like_loss,
                              symbol='{}{}'.format(metric_name, i + 1))
                    self._metrics.append(slot)
                    # Add summary
                    if hub.summary:
                        name = 'metric_sum_{}'.format(i + 1)
                        sum_slot = SummarySlot(self, name)
                        sum_slot.plug(tf.summary.scalar(name, metric_tensor))
                        self._validation_summaries.append(sum_slot)

        # Define train step
        self._define_train_step(optimizer)

        # Define groups
        # TODO: when training a single branch with summaries on, an error may
        # .. occur because the higher branch summaries cannot get their values
        act_summaries = []
        if hub.monitor_preact:
            slot = SummarySlot(self, 'act_summary')
            slot.plug(
                tf.summary.merge(tf.get_collection(
                    pedia.train_step_summaries)))
            act_summaries.append(slot)
        self._update_group = Group(self, *self._losses, *self._train_steps,
                                   *self._train_step_summaries, *act_summaries)
        self._validate_group = Group(self, *self._metrics,
                                     *self._validation_summaries)
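
When strict_residual is on, the i-th output tensor above is branch_i plus the previous cumulative output, so every exit point refines its predecessor instead of predicting from scratch. A minimal TF 1.x sketch of that accumulation (shapes and names are illustrative):

import tensorflow as tf  # TF 1.x API

x = tf.placeholder(tf.float32, [None, 8], name='input')
branch_outputs = [tf.layers.dense(x, 10, name='branch_%d' % i)
                  for i in range(3)]

# cumulative[i] = branch_outputs[i] + cumulative[i - 1], as in the loop above
cumulative = []
for i, out in enumerate(branch_outputs):
    cumulative.append(out if i == 0 else out + cumulative[i - 1])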