def build(self, optimizer=None, metric='accuracy', **kwargs):
  Predictor.build(self, optimizer=optimizer, loss='cross_entropy',
                  metric=metric, metric_is_like_loss=False,
                  metric_name='Accuracy')
def _print_progress(self, epc, start_time, info_dict, **kwargs):
  # Sanity check
  assert self._metric is not None
  if self._validation_set is None:
    raise ValueError('Validation set is None')
  # Reset dict
  info_dict = {}
  data_batch = kwargs.get('data_batch', None)
  assert data_batch is not None
  # Evaluate accuracy on the current training batch
  feed_dict = self._get_default_feed_dict(data_batch, is_training=False)
  sum_train_acc, train_acc = self._session.run(
    [self._sum_train_acc, self._metric], feed_dict=feed_dict)
  # Evaluate accuracy on the validation set
  feed_dict = self._get_default_feed_dict(
    self._validation_set, is_training=False)
  sum_val_acc, val_acc = self._session.run(
    [self._sum_val_acc, self._metric], feed_dict=feed_dict)
  # Write both summaries and report the accuracies
  self._summary_writer.add_summary(sum_train_acc, self._counter)
  self._summary_writer.add_summary(sum_val_acc, self._counter)
  info_dict['Training Accuracy'] = train_acc
  info_dict['Validation Accuracy'] = val_acc
  # Call predictor's _print_progress
  Predictor._print_progress(self, epc, start_time, info_dict)
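# For context, a minimal sketch of how an accuracy tensor such as
# self._metric above is commonly wired in TensorFlow 1.x. This is not
# part of the snippets here; the placeholder names and shapes are
# assumptions made for illustration only.
import tensorflow as tf  # assumes TensorFlow 1.x

logits = tf.placeholder(tf.float32, shape=[None, 10], name='logits')
targets = tf.placeholder(tf.float32, shape=[None, 10], name='targets')

# Accuracy: fraction of samples whose argmax prediction matches the label
correct = tf.equal(tf.argmax(logits, 1), tf.argmax(targets, 1))
accuracy = tf.reduce_mean(tf.cast(correct, tf.float32), name='accuracy')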
def __init__(self, mark=None, net_type=Feedforward):
  Predictor.__init__(self, mark, net_type)
  # Private attributes
  self._probabilities = TensorSlot(self, 'Probability')
  self._evaluation_group = Group(
    self, self._metric, self._probabilities, name='evaluation group')
def _build(self, optimizer=None, metric=None, **kwargs):
  # TODO: ... do some compromise
  hub.block_validation = True
  # If the last layer is not a softmax layer, add one to the model
  if not (isinstance(self.last_function, Activation) and
          self.last_function.abbreviation == 'softmax'):
    self.add(Activation('softmax'))
  # Call parent's method to build using the default loss function
  # -- cross entropy
  Predictor._build(self, optimizer=optimizer, metric=metric, **kwargs)
  assert self.outputs.activated
  # Plug tensor into probabilities slot
  self._probabilities.plug(self.outputs.tensor)
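# The guard in _build above appends a softmax activation only when the
# network does not already end with one. A framework-free sketch of the
# same idempotent check, with a stub class standing in for the real
# Activation layer (names here are assumptions for illustration):
class _Activation(object):
  def __init__(self, abbreviation):
    self.abbreviation = abbreviation

def _ensure_softmax(layers):
  # Append a softmax activation unless the last layer already is one
  last = layers[-1] if layers else None
  if not (isinstance(last, _Activation) and last.abbreviation == 'softmax'):
    layers.append(_Activation('softmax'))
  return layers

layers = [_Activation('relu')]
_ensure_softmax(layers)  # appends a softmax layer
_ensure_softmax(layers)  # idempotent: the list is left unchanged
assert len(layers) == 2 and layers[-1].abbreviation == 'softmax'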
def build(self, optimizer=None, loss='cross_entropy', metric='accuracy',
          batch_metric=None, eval_metric=None, **kwargs):
  Predictor.build(self, optimizer=optimizer, loss=loss, metric=metric,
                  batch_metric=batch_metric, eval_metric=eval_metric,
                  **kwargs)
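# A hedged usage sketch of the build signature above, assuming the
# enclosing class is exported as Classifier and that a TensorFlow 1.x
# optimizer is passed. The import path, the mark string and the
# learning rate are assumptions for illustration only:
import tensorflow as tf  # assumes TensorFlow 1.x
from tframe import Classifier  # import path is an assumption

model = Classifier(mark='demo')  # 'demo' is an illustrative mark
# ... add layers to the model here ...
model.build(optimizer=tf.train.AdamOptimizer(learning_rate=1e-3))
# The defaults then apply: loss='cross_entropy', metric='accuracy'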
def build(self, loss='cross_entropy', optimizer=None):
  # TODO: ... do some compromise
  config.block_validation = True
  # Call parent's method to build using the default loss function
  # -- cross entropy
  Predictor.build(self, loss, optimizer, metric='accuracy',
                  metric_name=pedia.Accuracy)
  self._sum_train_acc = tf.summary.scalar('train_acc', self._metric)
  self._sum_val_acc = tf.summary.scalar('val_acc', self._metric)
  # If the last layer is already a softmax, reuse the outputs as
  # probabilities; otherwise apply softmax to the outputs
  if (isinstance(self.last_function, Activation) and
      self.last_function.abbreviation == 'softmax'):
    self._probabilities = self.outputs
  else:
    self._probabilities = tf.nn.softmax(self.outputs, name='probabilities')
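# The two tf.summary.scalar ops above let the trainer log training and
# validation accuracy under separate tags while sharing a single metric
# tensor. A minimal TensorFlow 1.x sketch of that pattern; the log
# directory, feed values and step are illustrative assumptions:
import tensorflow as tf  # assumes TensorFlow 1.x

metric = tf.placeholder(tf.float32, name='accuracy_value')
sum_train_acc = tf.summary.scalar('train_acc', metric)
sum_val_acc = tf.summary.scalar('val_acc', metric)

writer = tf.summary.FileWriter('/tmp/logs')  # illustrative log directory
with tf.Session() as sess:
  # The same metric tensor is run twice with different feeds, once per
  # tag, mirroring the train/val flow in _print_progress above
  train_summ = sess.run(sum_train_acc, feed_dict={metric: 0.91})
  val_summ = sess.run(sum_val_acc, feed_dict={metric: 0.88})
  writer.add_summary(train_summ, 0)
  writer.add_summary(val_summ, 0)
writer.close()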
def __init__(self, mark=None, net_type=Feedforward):
  Predictor.__init__(self, mark, net_type)
  # Private attributes
  self._probabilities = TensorSlot(self, 'Probability')
def __init__(self, mark=None):
  Predictor.__init__(self, mark)
  self._sum_train_acc = None
  self._sum_val_acc = None
  self._probabilities = None