Example #1
    def build(self,
              loss='cross_entropy',
              lr_list=None,
              optimizer=None,
              metric=None,
              metric_name='Metric'):
        if self._identity_initial:
            self._initial_define()
        Feedforward.build(self)
        # Check branch shapes
        output_shape = self.outputs.get_shape().as_list()
        for b_out in self.branch_outputs:
            assert isinstance(b_out, tf.Tensor)
            if b_out.get_shape().as_list() != output_shape:
                raise ValueError(
                    '!! Branch outputs in bamboo should have the same'
                    ' shape as the trunk output')
        # Initialize targets and add them to the collection
        self._targets = tf.placeholder(self.outputs.dtype,
                                       output_shape,
                                       name='targets')
        tf.add_to_collection(pedia.default_feed_dict, self._targets)

        # Generate output list
        output_list = self.branch_outputs + [self.outputs]

        # Define losses
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            # Add branch outputs
            for output in output_list:
                self._losses.append(loss_function(self._targets, output))

        # Define metrics
        metric_function = metrics.get(metric)
        if metric_function is not None:
            pedia.memo[pedia.metric_name] = metric_name
            with tf.name_scope('Metric'):
                for output in output_list:
                    self._metrics.append(metric_function(
                        self._targets, output))

        # Define train step
        self._define_train_step(optimizer)

        # Sanity check
        assert len(self._losses) == len(
            self._metrics) == len(self.branch_outputs) + 1

        # Print status and model structure
        self.show_building_info(FeedforwardNet=self)

        # Launch session
        self.launch_model(FLAGS.overwrite and FLAGS.train)

        # Set built flag
        self._output_list = output_list
        self._built = True
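
The build method above requires every branch output to have exactly the trunk's static shape. A standalone TF1-style illustration of the comparison it performs (my sketch; the placeholder shapes are arbitrary):

    import tensorflow as tf

    # Static shapes are compared as plain Python lists, so a [None, 10]
    # branch matches a [None, 10] trunk but not a [None, 12] one.
    trunk = tf.placeholder(tf.float32, [None, 10], name='trunk')
    branch = tf.placeholder(tf.float32, [None, 10], name='branch')
    assert branch.get_shape().as_list() == trunk.get_shape().as_list()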
Example #2
    def _build(self,
               optimizer=None,
               loss='euclid',
               metric=None,
               metric_is_like_loss=True,
               metric_name='Metric',
               **kwargs):
        # Call parent's build method
        # Usually output tensor has been plugged into Model._outputs slot
        self.master._build(self)
        assert self.outputs.activated

        # Initialize targets and add them to the collection
        self._plug_target_in(self.outputs.shape_list)

        # Define loss
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            if loss == 'cross_entropy':
                output_tensor = self.logits_tensor
                assert output_tensor is not None
            else:
                output_tensor = self.outputs.tensor
            loss_tensor = loss_function(self._targets.tensor, output_tensor)
            # TODO: with or without regularization loss?
            if hub.summary:
                tf.add_to_collection(
                    pedia.train_step_summaries,
                    tf.summary.scalar('loss_sum', loss_tensor))
            # Try to add regularization loss
            reg_loss = self.regularization_loss
            if reg_loss is not None: loss_tensor += reg_loss
            # Plug in
            self.loss.plug(loss_tensor)

        # Define metric
        if metric is not None:
            metric_function = metrics.get(metric)
            with tf.name_scope('Metric'):
                metric_tensor = metric_function(self._targets.tensor,
                                                self._outputs.tensor)
                self._metric.plug(metric_tensor,
                                  as_loss=metric_is_like_loss,
                                  symbol=metric_name)
                if hub.summary:
                    tf.add_to_collection(
                        pedia.validation_summaries,
                        tf.summary.scalar('metric_sum', self._metric.tensor))

        # Merge summaries
        self._merge_summaries()

        # Define train step
        self._define_train_step(optimizer)
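
Note that the cross_entropy branch above feeds `self.logits_tensor` rather than the activated output. A self-contained sketch of the numerical reason (my illustration, not part of the listing):

    import numpy as np

    # Softmax applied naively to large logits overflows; loss functions
    # that take logits can use the stable shifted (log-sum-exp) form.
    logits = np.array([1000.0, 0.0])
    stable = np.exp(logits - logits.max())
    stable /= stable.sum()        # stable softmax -> [1.0, 0.0]
    with np.errstate(over='ignore'):
        naive = np.exp(logits)    # overflows to [inf, 1.0]
    assert np.isfinite(stable).all() and not np.isfinite(naive).all()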
Example #3
    def build(self,
              loss='cross_entropy',
              optimizer=None,
              metric=None,
              metric_name='Metric'):
        if self._identity_initial:
            self._identity_define()

        self.outputs = self()

        # Initialize targets and add them to the collection
        self._targets = tf.placeholder(
            name='targets',
            shape=self.branch_outputs[0].get_shape(),
            dtype=self.branch_outputs[0].dtype)
        tf.add_to_collection(pedia.default_feed_dict, self._targets)

        output_list = []
        for i in range(len(self.branch_outputs)):
            output_list.append(tf.add_n(self.branch_outputs[:(i + 1)]))

        # Define loss
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            for output in output_list:
                self._losses.append(loss_function(self._targets, output))

        # Define metrics
        metric_function = metrics.get(metric)
        if metric_function is not None:
            pedia.memo[pedia.metric_name] = metric_name
            with tf.name_scope('Metric'):
                for output in output_list:
                    self._metrics.append(metric_function(
                        self._targets, output))

        # Define train step
        self._define_train_step(optimizer)

        # Sanity check
        assert len(self._losses) == len(self._metrics) == len(
            self.branch_outputs)

        # Print status and model structure
        self.show_building_info(Feedforward=self)

        # Launch session
        self.launch_model(FLAGS.overwrite and FLAGS.train)

        # Set built flag
        self._output_list = output_list
        self._built = True
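
Unlike Example #1, the output list here is cumulative: entry i sums the first i + 1 branch outputs via tf.add_n. A standalone numeric illustration of that prefix-sum construction (my sketch):

    import numpy as np

    branch_outputs = [np.full((2, 3), v) for v in (1.0, 2.0, 4.0)]
    output_list = [np.sum(branch_outputs[:i + 1], axis=0)
                   for i in range(len(branch_outputs))]
    assert output_list[0][0, 0] == 1.0   # first branch alone
    assert output_list[-1][0, 0] == 7.0  # 1 + 2 + 4, all branches summed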
Example #4
    def _build(self,
               loss='cross_entropy',
               optimizer=None,
               metric=None,
               metric_is_like_loss=True,
               metric_name='Metric'):
        Feedforward._build(self)
        # Check branch shapes
        output_shape = self.outputs.shape_list
        for b_out in self.branch_outputs:
            assert isinstance(b_out, tf.Tensor)
            if b_out.get_shape().as_list() != output_shape:
                raise ValueError(
                    '!! Branch outputs in bamboo should have the same'
                    ' shape as the trunk output')
        # Initialize targets and add them to the collection
        self._plug_target_in(output_shape)

        # Generate output list
        self._output_list = self.branch_outputs + [self.outputs.tensor]

        # Define losses
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            # Add branch outputs
            for output in self._output_list:
                assert isinstance(output, tf.Tensor)
                self._losses.append(loss_function(self._targets.tensor,
                                                  output))

        # Define metrics
        metric_function = metrics.get(metric)
        if metric_function is not None:
            with tf.name_scope('Metric'):
                for output in self._output_list:
                    self._metrics.append(
                        metric_function(self._targets.tensor, output))
                self.key_metric.plug(self._metrics[-1],
                                     as_loss=metric_is_like_loss,
                                     symbol=metric_name)

        # Define train step
        self._define_train_step(optimizer)

        # Set default branch
        self.set_branch_index(-1)

        # Sanity check
        assert len(self._losses) == len(
            self._metrics) == len(self.branch_outputs) + 1
Example #5
  def build(self, loss='cross_entropy', optimizer=None,
            metric=None, metric_name='Metric'):
    Feedforward.build(self)
    # Summary placeholder
    default_summaries = []
    print_summaries = []
    # Initialize targets and add them to the collection
    self._targets = tf.placeholder(self.outputs.dtype, self.outputs.get_shape(),
                                   name='targets')
    tf.add_to_collection(pedia.default_feed_dict, self._targets)

    # Define loss
    loss_function = losses.get(loss)
    with tf.name_scope('Loss'):
      self._loss = loss_function(self._targets, self.outputs)
      # TODO: with or without regularization loss?
      default_summaries.append(tf.summary.scalar('loss_sum', self._loss))
      # Try to add regularization loss
      reg_loss = self.regularization_loss
      self._loss = self._loss if reg_loss is None else self._loss + reg_loss

    # Define metric
    metric_function = metrics.get(metric)
    if metric_function is not None:
      pedia.memo[pedia.metric_name] = metric_name
      with tf.name_scope('Metric'):
        self._metric = metric_function(self._targets, self.outputs)
        print_summaries.append(tf.summary.scalar('metric_sum', self._metric))

    # Merge summaries
    self._merged_summary = tf.summary.merge(default_summaries)
    if len(print_summaries) > 0:
      self._print_summary = tf.summary.merge(print_summaries)

    # Define train step
    self._define_train_step(optimizer)

    # Print status and model structure
    self.show_building_info(FeedforwardNet=self)

    # Launch session
    self.launch_model(FLAGS.overwrite and FLAGS.train)

    # Set built flag
    self._built = True
Example #6
    def __init__(self,
                 model,
                 f,
                 loss=None,
                 loss_coef=1.0,
                 name='output',
                 target_key=None,
                 last_only=False):
        from tframe import losses  # TODO: refactor this line
        # Sanity check
        assert isinstance(f, Function)
        assert isinstance(loss_coef, float) and loss_coef > 0
        # Currently this class should not appear in RNN codes
        assert not last_only
        # Call parent's constructor
        super().__init__(model, name=name)
        # Other attributes
        self.function = f
        self.loss_coef = loss_coef
        self.loss_quantity = losses.get(loss,
                                        last_only=last_only) if loss else None
        self.target_key = target_key
        self.target_slot = TensorSlot(model, '{}_slot'.format(name))
        self.loss_slot = TensorSlot(model, '{}_loss_slot'.format(name))
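
A hedged sketch of constructing such an output head; the OutputHead class name, the model object, and the dense_layer function are hypothetical stand-ins, since the listing shows neither the class name nor a call site:

    # Hypothetical usage; OutputHead, model and dense_layer are stand-ins.
    head = OutputHead(model,
                      f=dense_layer,             # must be a tframe Function
                      loss='euclid',             # resolved via losses.get
                      loss_coef=0.5,             # scales this head's loss
                      name='aux',
                      target_key='aux_targets')  # key for fetching targets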
Example #7
    def _build(self,
               loss='euclid',
               optimizer=None,
               homo_strength=1.0,
               metric=None,
               metric_name='Metric'):
        """Build model"""
        # Set summary placeholders
        default_summaries = []
        print_summaries = []
        # Define output
        for order, op in self.T.items():
            self._outputs[order] = op()
        with tf.name_scope('Outputs'):
            self._output = tf.add_n(list(self._outputs.values()),
                                    name='output')

        self._target = tf.placeholder(self._output.dtype,
                                      self._output.get_shape(),
                                      name='target')
        tf.add_to_collection(pedia.default_feed_dict, self._target)

        # Define loss
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            # All losses in loss list will be added
            loss_list = []

            # Delta loss
            with tf.name_scope('Delta'):
                delta_loss = loss_function(self._target, self._output)
                loss_list.append(delta_loss)
                default_summaries.append(
                    tf.summary.scalar('delta_loss_sum', delta_loss))

            # Homogeneous loss
            with tf.name_scope('Homogeneous'):
                homo_list = []
                # Calculate homo-loss for each order
                for order, op in self.T.items():
                    if order in range(1, self._max_volterra_order + 1):
                        continue
                    coef = self._alpha**order
                    truth_k = self._outputs[order] * coef
                    pred_k = op(self._input.place_holder * self._alpha)

                    # Calculate loss
                    homo_loss_k = tf.norm(truth_k - pred_k,
                                          name='homo_loss_{}'.format(order))
                    # homo_list.append(numerator / coef)
                    homo_list.append(homo_loss_k)

                    # Add summary
                    default_summaries.append(
                        tf.summary.scalar('homo_loss_{}_sum'.format(order),
                                          homo_loss_k))

                # Add all homogeneous losses
                if len(homo_list) > 0:
                    homo_loss = tf.add_n(homo_list,
                                         'homo_loss') * homo_strength
                    loss_list.append(homo_loss)

            # Try to add regularization loss
            reg_list = [
                op.regularization_loss for op in self.T.values()
                if op.regularization_loss is not None
            ]
            if len(reg_list) > 0:
                with tf.name_scope('WeightNorm'):
                    weight_norm = tf.add_n(reg_list, name='reg_loss')
                    loss_list.append(weight_norm)
                    # tf.summary.scalar('reg_loss_sum', weight_norm)

            # Add all losses
            self._loss = tf.add_n(loss_list, name='loss')
            # tf.summary.scalar('total_loss', self._loss)

        # Define metric
        metric_function = metrics.get(metric)
        if metric_function is not None:
            pedia.memo[pedia.metric_name] = metric_name
            with tf.name_scope('Metric'):
                self._metric = metric_function(self._target, self._output)
                print_summaries.append(
                    tf.summary.scalar('metric_sum', self._metric))

        # Merge summaries
        self._merged_summary = tf.summary.merge(default_summaries,
                                                name='default_summaries')
        if len(print_summaries) > 0:
            self._print_summary = tf.summary.merge(print_summaries)

        # Define train step
        self._define_train_step(optimizer)

        # Print status and model structure
        self._show_building_info(**{
            'T[{}]'.format(key): val
            for key, val in self.operators.items()})

        # Launch session
        self.launch_model(FLAGS.overwrite and FLAGS.train)

        # Set built flag
        self._built = True
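
The Homogeneous block encodes the defining property of a degree-k homogeneous operator, T_k(alpha * x) == alpha**k * T_k(x): the norm it takes is zero exactly when the property holds. A self-contained check with a genuinely homogeneous operator (my illustration):

    import numpy as np

    alpha, x = 0.5, np.array([1.0, 2.0, 3.0])
    T2 = lambda v: v ** 2                 # homogeneous of degree 2
    truth_k = alpha ** 2 * T2(x)          # coef * self._outputs[order]
    pred_k = T2(alpha * x)                # op(input * alpha)
    assert np.allclose(truth_k, pred_k)   # homo-loss would be ~0 here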
Example #8
    def _build(self,
               loss='cross_entropy',
               optimizer=None,
               metric=None,
               metric_is_like_loss=True,
               metric_name='Metric'):
        Feedforward._build(self)
        # Check shapes of branch outputs
        output_shape = self._check_branch_outputs()
        # Initiate targets placeholder
        self._plug_target_in(output_shape)

        # Define output tensors
        for i, output in enumerate(self.branch_outputs):
            if i == 0 or not self.strict_residual:
                output_tensor = output
            else:
                output_tensor = output + self._boutputs[i - 1].tensor
            slot = TensorSlot(self, name='output_{}'.format(i + 1))
            slot.plug(output_tensor)
            self._boutputs.append(slot)

        # Define loss tensors
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            for i, output in enumerate(self._boutputs):
                assert isinstance(output, TensorSlot)
                loss_tensor = loss_function(self._targets.tensor,
                                            output.tensor)
                slot = TensorSlot(self, name='loss_{}'.format(i + 1))
                slot.plug(loss_tensor)
                self._losses.append(slot)
                # Add summary
                if hub.summary:
                    name = 'loss_sum_{}'.format(i + 1)
                    sum_slot = SummarySlot(self, name)
                    sum_slot.plug(tf.summary.scalar(name, loss_tensor))
                    self._train_step_summaries.append(sum_slot)

        # Define metric tensors
        metric_function = metrics.get(metric)
        if metric_function is not None:
            with tf.name_scope('Metric'):
                for i, output in enumerate(self._boutputs):
                    assert isinstance(output, TensorSlot)
                    metric_tensor = metric_function(self._targets.tensor,
                                                    output.tensor)
                    slot = Metric(self, name='metric_{}'.format(i + 1))
                    slot.plug(metric_tensor,
                              as_loss=metric_is_like_loss,
                              symbol='{}{}'.format(metric_name, i + 1))
                    self._metrics.append(slot)
                    # Add summary
                    if hub.summary:
                        name = 'metric_sum_{}'.format(i + 1)
                        sum_slot = SummarySlot(self, name)
                        sum_slot.plug(tf.summary.scalar(name, metric_tensor))
                        self._validation_summaries.append(sum_slot)

        # Define train step
        self._define_train_step(optimizer)

        # Define groups
        # TODO: when training a single branch with summary on, errors may
        # .. occur because the higher-branch summaries cannot fetch values
        act_summaries = []
        if hub.monitor_preact:
            slot = SummarySlot(self, 'act_summary')
            slot.plug(
                tf.summary.merge(tf.get_collection(
                    pedia.train_step_summaries)))
            act_summaries.append(slot)
        self._update_group = Group(self, *self._losses, *self._train_steps,
                                   *self._train_step_summaries, *act_summaries)
        self._validate_group = Group(self, *self._metrics,
                                     *self._validation_summaries)
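
With strict_residual enabled, each branch tensor is added to the previous accumulated output, so branch i only has to model the residual left over by branches 0..i-1. A standalone numeric illustration of that stacking (my sketch):

    import numpy as np

    branches = [np.array([1.0]), np.array([0.5]), np.array([0.25])]
    accumulated = []
    for i, b in enumerate(branches):
        accumulated.append(b if i == 0 else b + accumulated[i - 1])
    assert np.allclose(accumulated[-1], [1.75])  # 1 + 0.5 + 0.25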
Example #9
    def _build(self, optimizer=None, loss='euclid', metric=None, **kwargs):
        # For some RNN predictors, only the last step counts as the output,
        #   e.g. RNNs for sequence classification tasks
        last_only = False
        if 'last_only' in kwargs:
            last_only = kwargs.pop('last_only')
            if hub.use_gather_indices:
                # Initiate gather_indices placeholder
                assert context.gather_indices is None
                context.gather_indices = tf.placeholder(
                    tf.int32, [None, 2], 'gather_indices')
                tf.add_to_collection(pedia.default_feed_dict,
                                     context.gather_indices)

        # Get loss quantity before building
        self.loss_quantity = losses.get(loss, last_only)
        # This is for calculating loss inside a while-loop
        context.loss_function = self.loss_quantity.function

        # Call parent's build method to link network
        # Usually output tensor has been plugged into Model._outputs slot
        self.master._build(self)
        assert self.outputs.activated

        # Initialize targets and add them to the collection
        self._plug_target_in(self.outputs.shape_list)

        # Define loss. Some tensorflow apis only support calculating logits
        with tf.name_scope('Loss'):
            loss_tensor = self.loss_quantity(self._targets.tensor,
                                             self.outputs.tensor)

            # TODO: with or without regularization loss?
            if hub.summary:
                tf.add_to_collection(
                    pedia.train_step_summaries,
                    tf.summary.scalar('loss_sum', loss_tensor))
            # Try to add extra loss which is calculated by the corresponding net
            # .. regularization loss is included
            if self.extra_loss is not None:
                loss_tensor = tf.add(loss_tensor, self.extra_loss)
            # Plug in
            self.loss.plug(loss_tensor, quantity_def=self.loss_quantity)

        # <monitor_grad_step_02: register loss and plug grad_ops in>
        if hub.monitor_weight_grads:
            context.monitor.register_loss(loss_tensor)
            self.grads_slot.plug(context.monitor.grad_ops_list)
            self._update_group.add(self.grads_slot)

        # Monitor general tensors (currently only activation is included)
        if hub.export_activations and context.monitor.tensor_fetches:
            self.general_tensor_slot.plug(context.monitor.tensor_fetches)
            self._update_group.add(self.general_tensor_slot)

        # Initialize metric
        if metric is not None:
            checker.check_type_v2(metric, (str, Quantity))
            # Create placeholder for val_targets if necessary
            # Common targets will be plugged into val_target slot by default
            self._plug_val_target_in(kwargs.get('val_targets', None))

            with tf.name_scope('Metric'):
                self._metrics_manager.initialize(metric, last_only,
                                                 self._val_targets.tensor,
                                                 self._outputs.tensor,
                                                 **kwargs)

        # Merge summaries
        self._merge_summaries()

        # Define train step
        self._define_train_step(optimizer)
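
The gather_indices placeholder above has shape [None, 2], which suggests one (batch, step) index pair per sequence for picking a single step, e.g. the last valid one, out of each padded sequence; that interpretation is my reading of the shape, not something the listing states. A numpy equivalent of what tf.gather_nd would do with such indices:

    import numpy as np

    outputs = np.arange(24, dtype=np.float32).reshape(2, 3, 4)  # [batch, steps, dim]
    gather_indices = np.array([[0, 2],    # sequence 0 ends at step 2
                               [1, 1]])   # sequence 1 ends at step 1
    last_steps = outputs[gather_indices[:, 0], gather_indices[:, 1]]
    assert last_steps.shape == (2, 4)     # one output vector per sequence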