# Example 1
  def __init__(self, mark=None):
    """Initialize the model: resolve its mark, create the agent, define
    all tensor/summary slots and groups, and set default attributes.

    :param mark: model mark; the mark usually helps to decide the
      folder name. `hub.mark`, when set, takes precedence.
    """
    # Resolve the model mark (TODO: needs to be refactored)
    self.mark = hub.mark or mark
    assert mark is not None
    # Decorate the mark with the optional prefix and suffixes from hub
    if hub.prefix is not None: self.mark = hub.prefix + self.mark
    for extra in (hub.suffix, hub.script_suffix):
      if extra is not None: self.mark += extra
    # TODO: set prune iteration number.
    #       At this time configs conflicts are not smoothed.
    if hub.prune_on or hub.pruning_rate_fc > 0:
      self.mark += '_pr%s' % hub.pruning_iterations
    hub.mark = self.mark

    # Each model owns an agent that deals with some tensorflow stuff
    self.agent = Agent(self)

    # -- Slot definitions --
    # 2020-6-10 | William |
    #   outputs should be a Group which is more general for error injection
    #   tframe 2.0 should be using such way to describe a Model
    self._outputs = TensorSlot(self)

    # Compromising way to enable additional error injection
    self._forms_for_injection = []

    self._metrics_manager = MetricsManager(self)

    self._validation_summary = SummarySlot(self)
    self._batch_val_summ = IndependentSummarySlot(self, 'batch_metric_summ')

    self._loss = TensorSlot(self, 'Loss')
    self._train_step = OperationSlot(self)
    self._train_step_summary = SummarySlot(self)

    # Groups bundle the slots that are run together
    self.validate_group = Group(
      self, self._validation_summary, name='Validate-group')
    self._update_group = Group(
      self, self._loss, self._train_step, self._train_step_summary,
      name='Update-group')

    # Slots for exporting np values to note
    self.grads_slot = NestedTensorSlot(self, 'Gradients')
    self.general_tensor_slot = NestedTensorSlot(self, 'General-Tensor')

    # Private attributes
    self._default_net = None  # TODO to be removed
    self._optimizer = None
    self._built = False
    self._scheme = None

    # Public attributes
    self.counter = None
    self.rounds = None
    self.launched = False

    # Quantities
    self.loss_quantity = None
# Example 2
 def __init__(self, mark=None, net_type=Feedforward):
     """Construct a Predictor.

     :param mark: model mark, forwarded to the net_type constructor
     :param net_type: either Feedforward or Recurrent
     :raises TypeError: if net_type is neither Feedforward nor Recurrent
     """
     # Idiomatic membership test (was `not net_type in (...)`)
     if net_type not in (Feedforward, Recurrent):
         raise TypeError('!! Unknown net type')
     self.master = net_type
     # Attributes: slots holding target tensors for training and validation
     self._targets = TensorSlot(self, 'targets')
     self._val_targets = TensorSlot(self, 'val_targets')
     # Call parent's constructor explicitly on the chosen net type
     net_type.__init__(self, mark)
# Example 3
 def __init__(self, mark=None, net_type=Feedforward):
     """Delegate construction to Predictor, then add the probability
     slot and the evaluation group."""
     # Let the Predictor base set up the common attributes first
     Predictor.__init__(self, mark, net_type)
     # Private attributes
     self._probabilities = TensorSlot(self, 'Probability')
     # Bundle metric and probabilities so they can be run together
     # (self._metric is expected to be provided by the base class —
     # not visible here, confirm against Predictor)
     self._evaluation_group = Group(
         self, self._metric, self._probabilities, name='evaluation group')
# Example 4
    def __init__(self, mark=None):
        """Set up the model: resolve its mark, create the agent, define
        the slots and groups, and initialize default attributes.

        :param mark: model mark; it usually helps to decide the folder
          name. `hub.mark`, when set, takes precedence.
        """
        self.mark = hub.mark or mark
        assert mark is not None

        # Each model owns an agent that deals with some tensorflow stuff
        self.agent = Agent(self)

        # -- Slot definitions --
        self._outputs = TensorSlot(self)

        self._metric = Metric(self, 'metric')
        self._validation_summary = SummarySlot(self)
        self._batch_val_summ = IndependentSummarySlot(
            self, 'batch_metric_summ')
        self._validate_group = Group(
            self, self._metric, self._validation_summary,
            name='Validate-group')

        self._loss = TensorSlot(self, 'Loss')
        self._train_step = OperationSlot(self)
        self._train_step_summary = SummarySlot(self)
        self._update_group = Group(
            self, self._loss, self._metric, self._train_step,
            self._train_step_summary, name='Update-group')

        # Private attributes
        self._default_net = None
        self._optimizer = None
        self._built = False
        self._scheme = None

        # Public attributes
        self.counter = None
        self.launched = False
# Example 5
 def __init__(self, mark=None, net_type=Feedforward):
     """Delegate construction to Predictor, then add a probability slot."""
     Predictor.__init__(self, mark, net_type)
     # Private attributes
     self._probabilities = TensorSlot(self, 'Probability')
# Example 6
    def _build(self,
               loss='cross_entropy',
               optimizer=None,
               metric=None,
               metric_is_like_loss=True,
               metric_name='Metric'):
        """Build the multi-branch graph: plug per-branch output, loss and
        (optionally) metric tensors into slots, define the train step, and
        assemble the update/validate groups.

        :param loss: key passed to losses.get to obtain the loss function
        :param optimizer: forwarded to self._define_train_step
        :param metric: key passed to metrics.get; if it resolves to None,
            no metric tensors or validation summaries are defined
        :param metric_is_like_loss: passed to Metric.plug as `as_loss`
        :param metric_name: prefix used for each branch metric's symbol
        """
        Feedforward._build(self)
        # Check shapes of branch outputs
        output_shape = self._check_branch_outputs()
        # Initiate targets placeholder
        self._plug_target_in(output_shape)

        # Define output tensors.
        # NOTE(review): with strict_residual on, each branch output after
        # the first is added to the previously plugged tensor, forming a
        # cumulative (residual-style) chain — confirm against branch docs.
        for i, output in enumerate(self.branch_outputs):
            if i == 0 or not self.strict_residual:
                output_tensor = output
            else:
                output_tensor = output + self._boutputs[i - 1].tensor
            slot = TensorSlot(self, name='output_{}'.format(i + 1))
            slot.plug(output_tensor)
            self._boutputs.append(slot)

        # Define one loss tensor per branch output, each in its own slot
        loss_function = losses.get(loss)
        with tf.name_scope('Loss'):
            for i, output in enumerate(self._boutputs):
                assert isinstance(output, TensorSlot)
                loss_tensor = loss_function(self._targets.tensor,
                                            output.tensor)
                slot = TensorSlot(self, name='loss_{}'.format(i + 1))
                slot.plug(loss_tensor)
                self._losses.append(slot)
                # Add summary (only when summaries are globally enabled)
                if hub.summary:
                    name = 'loss_sum_{}'.format(i + 1)
                    sum_slot = SummarySlot(self, name)
                    sum_slot.plug(tf.summary.scalar(name, loss_tensor))
                    self._train_step_summaries.append(sum_slot)

        # Define metric tensors, one per branch, mirroring the loss setup.
        # Skipped entirely when metrics.get returns None.
        metric_function = metrics.get(metric)
        if metric_function is not None:
            with tf.name_scope('Metric'):
                for i, output in enumerate(self._boutputs):
                    assert isinstance(output, TensorSlot)
                    metric_tensor = metric_function(self._targets.tensor,
                                                    output.tensor)
                    slot = Metric(self, name='metric_{}'.format(i + 1))
                    slot.plug(metric_tensor,
                              as_loss=metric_is_like_loss,
                              symbol='{}{}'.format(metric_name, i + 1))
                    self._metrics.append(slot)
                    # Add summary
                    if hub.summary:
                        name = 'metric_sum_{}'.format(i + 1)
                        sum_slot = SummarySlot(self, name)
                        sum_slot.plug(tf.summary.scalar(name, metric_tensor))
                        self._validation_summaries.append(sum_slot)

        # Define train step (populates self._train_steps — not visible here)
        self._define_train_step(optimizer)

        # Define groups
        # TODO when train a single branch with summary on, error may occur
        # .. due to that the higher branch summary can not get its value
        act_summaries = []
        if hub.monitor_preact:
            # Merge all collected pre-activation summaries into one slot
            slot = SummarySlot(self, 'act_summary')
            slot.plug(
                tf.summary.merge(tf.get_collection(
                    pedia.train_step_summaries)))
            act_summaries.append(slot)
        self._update_group = Group(self, *self._losses, *self._train_steps,
                                   *self._train_step_summaries, *act_summaries)
        self._validate_group = Group(self, *self._metrics,
                                     *self._validation_summaries)