Example #1
import math
import time

import deepkit
from keras.callbacks import Callback


# Excerpt from a Keras callback; the class name here is a placeholder, and
# get_total_params is a helper defined elsewhere in the same module.
class DeepkitCallback(Callback):
    def on_train_begin(self, logs=None):
        self.start_time = time.time()
        self.last_batch_time = time.time()

        deepkit.set_info('parameters', get_total_params(self.model))

        # self.job_backend.upload_keras_graph(self.model)

        if self.model.optimizer and hasattr(self.model.optimizer,
                                            'get_config'):
            config = self.model.optimizer.get_config()
            # deepkit.set_info('optimizer', type(self.model.optimizer).__name__)
            for key, value in config.items():
                deepkit.set_info('optimizer.' + str(key), value)

        # compatibility with keras 1.x
        if 'epochs' not in self.params and 'nb_epoch' in self.params:
            self.params['epochs'] = self.params['nb_epoch']
        if 'samples' not in self.params and 'nb_sample' in self.params:
            self.params['samples'] = self.params['nb_sample']

        xaxis = {
            # 'range': [1, self.params['epochs']],
            # 'title': u'Epoch ⇢'
        }
        yaxis = {'tickformat': '%', 'hoverformat': '%', 'rangemode': 'tozero'}

        traces = ['training', 'validation']
        if hasattr(self.model,
                   'output_layers') and len(self.model.output_layers) > 1:
            traces = []
            for output in self.model.output_layers:
                traces.append('train_' + output.name)
                traces.append('val_' + output.name)

        self.accuracy_metric = deepkit.create_metric('accuracy',
                                                     traces=traces,
                                                     xaxis=xaxis,
                                                     yaxis=yaxis)
        self.loss_metric = deepkit.create_loss_metric('loss', xaxis=xaxis)
        self.learning_rate_metric = deepkit.create_metric(
            'learning rate', traces=['start', 'end'], xaxis=xaxis)

        deepkit.epoch(0, self.params['epochs'])
        if hasattr(self.model,
                   'output_layers') and len(self.model.output_layers) > 1:
            # Multi-output models reuse the per-output traces built above.
            self.all_losses = deepkit.create_metric('loss_all',
                                                    xaxis=xaxis,
                                                    traces=traces)

    def on_batch_begin(self, batch, logs=None):
        # self.current is a dict initialized elsewhere in the callback.
        if 'nb_batches' not in self.current:
            batch_size = logs['size']
            if 'samples' in self.params:
                nb_batches = math.ceil(self.params['samples'] / batch_size)  # batches per epoch
            else:
                nb_batches = self.params['steps']

            self.current['nb_batches'] = nb_batches
            self.current['batch_size'] = batch_size
            deepkit.set_info('Batch size', batch_size)
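
A callback like this one only fires once it is passed to training. Below is a
minimal usage sketch, assuming the DeepkitCallback wrapper named above; the
model and data are placeholders, not part of the original example.

import numpy as np
from keras.models import Sequential
from keras.layers import Dense

# Toy model and data, only meant to show where the callback plugs in.
model = Sequential([Dense(10, activation='relu', input_shape=(4,)),
                    Dense(1, activation='sigmoid')])
model.compile(optimizer='adam', loss='binary_crossentropy',
              metrics=['accuracy'])

x = np.random.rand(256, 4)
y = np.random.randint(0, 2, size=(256, 1))

# on_train_begin and on_batch_begin from the excerpt fire inside fit().
model.fit(x, y, epochs=3, batch_size=32, callbacks=[DeepkitCallback()])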
Example #3
import random

import deepkit

context = deepkit.context()
context.add_file(__file__)

test = deepkit.create_metric('test')

for i in range(100):
    deepkit.set_info(i, random.random())

total = 1_000_000
for i in range(total):
    test.send(i, random.random())
    deepkit.epoch(i, total)

print("Bye.")