Example #1
0
    def on_epoch_end(self, epoch, logs):
        """Record training metrics at the end of an epoch.

        When ``self.every_epoch`` is truthy, metrics are logged after every
        epoch together with the epoch index. Otherwise, metrics are logged
        only after the final epoch, optionally followed by a Slack
        notification (when ``self.notify_slack`` is set).

        Args:
            epoch: Zero-based index of the epoch that just finished.
            logs: Mapping of metric name to numeric value supplied by Keras.
        """
        # Build a rounded copy instead of mutating Keras's `logs` dict in
        # place — the previous version clobbered the caller's values as a
        # side effect and duplicated this rounding loop in both branches.
        rounded = {key: round(value, 4) for key, value in logs.items()}

        if self.every_epoch:
            met_dict = {'epoch': epoch}
            met_dict.update(rounded)
            log_metrics(met_dict, verbose=False)

        elif epoch == self.params['epochs'] - 1:
            # Final epoch: no epoch key, matching the original record shape.
            met_dict = dict(rounded)
            log_metrics(met_dict, verbose=False)

            if self.notify_slack:
                result = {
                    'message':
                    'Training complete after ' + str(self.params['epochs']) +
                    ' epochs.',
                    'metrics':
                    met_dict
                }
                if self.hyperparams:
                    result['hyperparams'] = self.hyperparams
                notify(json.dumps(result, indent=2, cls=Encoder),
                       verbose=False,
                       safe=True)
Example #2
0
def test_log_metrics_verbose(mock_api_post_block, capsys):
    """log_metrics at default verbosity prints a confirmation line."""
    metrics = {'acc': 0.89, 'val_acc': 0.86}
    log_metrics(metrics)

    printed = capsys.readouterr().out.strip()
    assert printed == '[jovian] Metrics logged.'
Example #3
0
    def test_log_metrics(self, mock_api_post_block):
        """Keyword-argument metrics are appended as a new 'metrics' record."""
        data = {'acc': 0.89, 'val_acc': 0.86}

        log_metrics(acc=0.89, val_acc=0.86)

        expected_result = [
            ('fake_slug_metrics_1', 'metrics', {}),
            ('fake_slug_metrics_2', 'metrics', {}),
            ('fake_slug_hyperparams_1', 'hyperparams', {}),
            ('fake_slug_hyperparams_2', 'hyperparams', {}),
            ('fake_slug_3', 'metrics', data),
        ]
        self.assertEqual(jovian.utils.records._data_blocks, expected_result)
Example #4
0
    def on_epoch_end(self, epoch: int, smooth_loss: Tensor, last_metrics: list, **ka):
        """Log the epoch, train loss, and (when validating) validation metrics."""
        # smooth_loss is the smoothened average training loss for the epoch.
        values = [epoch, smooth_loss.item()]

        if self.valid_set:
            # last_metrics[0] is the validation loss; the rest are the
            # learner's metric tensors. NOTE(review): the first entry is
            # stringified while the others use .item() — preserved as-is,
            # but worth confirming this asymmetry is intentional upstream.
            values.append(str(last_metrics[0]))
            values.extend(metric.item() for metric in last_metrics[1:])

        log_metrics(dict(zip(self.met_names, values)))