Ejemplo n.º 1
0
def test_elapsed_real_time():
    """ElapsedRealTime should report wall-clock time close to the total sleep time.

    The mocked task runs 12 epochs x 12 steps, sleeping 0.01s per step,
    so the timer should read roughly 1.44s plus scheduling overhead.
    """
    timer = ElapsedRealTime()

    task = TaskMock(epochs=12,
                    steps=12,
                    callback=stop_after(None, None, sleep_time=0.01))
    task.metrics.append(timer)
    task.fit()

    # Use abs(): the original one-sided check (diff < 0.1) would also pass
    # if the timer under-reported arbitrarily (negative difference), hiding
    # a broken metric entirely.
    assert abs(timer.value()['elapsed_time'] - 12 * 12 * 0.01) < 0.1
Ejemplo n.º 2
0
def show_progress():
    """Render the same mock training run through four ProgressView setups.

    The views differ only in how much of the total workload (epochs/steps)
    is known up-front, which changes how progress is displayed.
    """
    epochs = 4
    steps = 12

    # Speed drop the first 5 observations
    speed = Speed()

    # Label -> view; labels are padded so the printed columns line up.
    views = {
        'default': ProgressView(speed),
        '  guess': ProgressView(speed, max_epochs=epochs),
        '  epoch': ProgressView(speed, max_epochs=epochs, max_steps=steps),
        '   step': ProgressView(speed, max_steps=epochs * steps),
    }
    for label, view in views.items():
        view.print_fun = named_print(label)

    task = TaskMock(callback=stop_after(None, None, sleep_time=1),
                    epochs=epochs,
                    steps=steps)

    task.metrics.append(speed)
    task.metrics.append(ElapsedRealTime())
    for view in views.values():
        task.metrics.append(view)

    task.fit()
Ejemplo n.º 3
0
    def __init__(self,
                 detector,
                 optimizer,
                 lr_scheduler,
                 dataloader,
                 criterion=None,
                 device=None,
                 storage=None):
        """Set up the object-detection task: model parts, bookkeeping and metrics."""
        super(ObjectDetection, self).__init__(device=device)

        # Epoch bookkeeping starts at zero; resumed runs adjust these later.
        self._first_epoch = 0
        self.current_epoch = 0

        # Caller-provided training components.
        self.detector = detector
        self.optimizer = optimizer
        self.lr_scheduler = lr_scheduler
        self.dataloader = dataloader
        self.criterion = criterion
        self.storage = storage

        # Default metric stack: timing, sample throughput, speed feeding a
        # live progress view, and the running loss.
        speed = Speed()
        for metric in (ElapsedRealTime().every(batch=1),
                       SampleCount().every(batch=1, epoch=1),
                       speed,
                       ProgressView(speed),
                       OnlineLoss()):
            self.metrics.append(metric)

        # Checkpoint only when a storage backend was provided.
        if storage:
            self.metrics.append(CheckPointer(storage=storage))
Ejemplo n.º 4
0
    def __init__(self, classifier, optimizer, lr_scheduler, dataloader, criterion=None, device=None,
                 storage=None, preprocessor=None, metrics=None):
        """Set up the classification task.

        Args:
            classifier: model producing class scores.
            optimizer: optimizer updating the classifier's parameters.
            lr_scheduler: learning-rate scheduler.
            dataloader: training data source.
            criterion: loss function; defaults to CrossEntropyLoss.
            device: device forwarded to the base task.
            storage: optional checkpoint storage backend.
            preprocessor: optional input preprocessor; defaults to Preprocessor().
            metrics: optional extra metrics, registered before ProgressView
                and CheckPointer so their values appear in the view/checkpoint.
        """
        super(Classification, self).__init__(device=device)
        criterion = select(criterion, CrossEntropyLoss())

        self._first_epoch = 0
        self.current_epoch = 0
        self.classifier = classifier
        self.optimizer = optimizer
        self.lr_scheduler = lr_scheduler
        self.dataloader = dataloader
        self.criterion = criterion
        # Resolve the preprocessor once, mirroring the `select` idiom used for
        # criterion above.  The original built a default Preprocessor()
        # unconditionally and then overwrote it at the very end of __init__
        # whenever a custom one was supplied.
        self.preprocessor = select(preprocessor, Preprocessor())
        # ------------------------------------------------------------------

        self.metrics.append(ElapsedRealTime().every(batch=1))
        self.metrics.append(SampleCount().every(batch=1, epoch=1))
        self.metrics.append(OnlineTrainAccuracy())
        self.metrics.append(Speed())

        # All metrics must be before ProgressView and CheckPointer
        if metrics:
            for metric in metrics:
                self.metrics.append(metric)

        self.metrics.append(ProgressView(self.metrics.get('Speed')))

        if storage:
            self.metrics.append(CheckPointer(storage=storage))
        # ------------------------------------------------------------------

        self.hyper_parameters = {}
Ejemplo n.º 5
0
    def __init__(self,
                 dataset,
                 oracle,
                 model,
                 optimizer,
                 device,
                 criterion=SharpeRatioCriterion()):
        """Set up the finance task: data, model components and the metric stack.

        NOTE(review): the default ``SharpeRatioCriterion()`` is created once at
        class-definition time and shared across all instances that rely on the
        default — confirm the criterion is stateless.
        """
        super(Finance, self).__init__(device=device)

        # Caller-provided components.
        self.dataset = dataset
        self.oracle = oracle
        self.criterion = criterion
        self.model = model
        self.optimizer = optimizer

        # Timing and throughput, sampled every batch.
        self.metrics.append(ElapsedRealTime().every(batch=1))
        self.metrics.append(SampleCount().every(batch=1, epoch=1))

        # Speed feeds the live progress view.
        speed = Speed()
        self.metrics.append(speed)
        self.metrics.append(ProgressView(speed))

        # Loss plus the return statistics reported by the task.
        self.metrics.append(OnlineLoss())
        for stat_name in ('mean_returns', 'std_returns'):
            self.metrics.append(NamedMetric(name=stat_name))

        self.current_epoch = 0
        self.hyper_parameters = {}
Ejemplo n.º 6
0
def main(**kwargs):
    """Run the classification experiment end-to-end.

    If the task exposes an unfilled hyper-parameter space, a Hyperband HPO
    pass runs first; the best trial's parameters are then used for a final
    full-length training run whose report is printed.
    """
    show_dict(kwargs)

    args = Namespace(**kwargs)
    set_verbose_level(args.verbose)

    device = fetch_device()
    # experiment_name may contain {placeholders} filled from the kwargs.
    experiment_name = args.experiment_name.format(**kwargs)

    # save partial results here
    state_storage = StateStorage(
        folder=option('state.storage', '/tmp/olympus/classification'))

    def main_task():
        # Factory rather than a single instance: HPO builds a fresh task
        # per trial.
        task = classification_baseline(device=device,
                                       storage=state_storage,
                                       **kwargs)

        if args.uri is not None:
            logger = metric_logger(args.uri, args.database, experiment_name)
            task.metrics.append(logger)

        return task

    space = main_task().get_space()

    # If space is not empty we search the best hyper parameters
    params = {}
    if space:
        show_dict(space)
        hpo = HPOptimizer('hyperband',
                          space=space,
                          fidelity=Fidelity(args.min_epochs,
                                            args.epochs).to_dict())

        hpo_task = HPO(hpo, main_task)
        hpo_task.metrics.append(ElapsedRealTime())

        trial = hpo_task.fit(objective='validation_accuracy')
        print(f'HPO is done, objective: {trial.objective}')
        params = trial.params
    else:
        print('No hyper parameter missing, running the experiment...')
    # ------

    # Run the experiment with the best hyper parameters
    # -------------------------------------------------
    # NOTE(review): `params` starts as {} and is only reassigned to
    # trial.params, so this check is always true unless trial.params is
    # None — confirm whether an empty-params final run is intended.
    if params is not None:
        # Train using train + valid for the final result
        final_task = classification_baseline(device=device,
                                             **kwargs,
                                             hpo_done=True)
        final_task.init(**params)
        final_task.fit(epochs=args.epochs)

        print('=' * 40)
        print('Final Trial Results')
        show_dict(flatten(params))
        final_task.report(pprint=True, print_fun=print)
        print('=' * 40)
Ejemplo n.º 7
0
    def __init__(self, model, metrics, name=None):
        """Wrap a scikit-learn model with timing/count metrics plus custom ones."""
        super(SklearnTask, self).__init__()
        self.model = model

        self.metrics.name = name
        # Wall-clock time and sample throughput first, then whatever the
        # caller supplied.
        base = [ElapsedRealTime().every(batch=1),
                SampleCount().every(batch=1)]
        for m in base + list(metrics):
            self.metrics.append(m)
Ejemplo n.º 8
0
    def __init__(self, models, create_subtask_metrics):
        """Build one SklearnTask per model and register ensemble-level metrics."""
        super(SklearnEnsembleTask, self).__init__()
        self.name = ''
        self.models = models

        # One subtask per model, in deterministic (sorted-by-name) order.
        self.tasks = OrderedDict(
            (model_name, SklearnTask(model,
                                     create_subtask_metrics(model_name),
                                     name=model_name))
            for model_name, model in sorted(models.items())
        )

        # Measure the time spent training, count samples, and aggregate
        # results across the whole ensemble.
        for metric in (ElapsedRealTime().every(batch=1),
                       SampleCount().every(batch=1),
                       EnsembleMetric(self)):
            self.metrics.append(metric)
Ejemplo n.º 9
0
    def __init__(self,
                 model: AbstractActorCritic,
                 dataloader,
                 optimizer,
                 lr_scheduler,
                 device,
                 ppo_epoch=5,
                 ppo_batch_size=32,
                 ppo_clip_param=10,
                 ppo_max_grad_norm=1000,
                 criterion=None,
                 storage=None,
                 logger=None):
        """Set up the PPO task: actor-critic model, optimizer state and metrics.

        NOTE(review): the ``logger`` argument is accepted but never stored or
        used in this constructor — confirm whether it should be wired in.
        """
        super(PPO, self).__init__(device=device)

        # Default criterion collapses the per-element loss to a scalar sum.
        if criterion is None:
            def criterion(x):
                return x.sum()

        # Caller-provided components.
        self.actor_critic = model
        self.lr_scheduler = lr_scheduler
        self.optimizer: Optimizer = optimizer
        self.criterion: Module = criterion
        self.dataloader = dataloader
        self.storage = storage

        # RL state and numerical constants.
        self.gamma: float = 0.99
        self.eps = np.finfo(np.float32).eps.item()
        self.action_sampler: Callable[[], Distribution] = Categorical
        self.tensor_shape = None
        self.frame_count: int = 0
        self._first_epoch = 0
        self.current_epoch = 0

        # PPO-specific knobs, straight from the arguments.
        self.ppo_epoch = ppo_epoch
        self.ppo_batch_size = ppo_batch_size
        self.ppo_clip_param = ppo_clip_param
        self.ppo_max_grad_norm = ppo_max_grad_norm

        # Metric stack: named loss, wall-clock time, then speed feeding the
        # progress view (ProgressView looks Speed up from the registry).
        self.metrics.append(NamedMetric(name='loss'))
        self.metrics.append(ElapsedRealTime())
        self.metrics.append(Speed())
        self.metrics.append(ProgressView(self.metrics.get('Speed')))

        # Checkpoint only when a storage backend was provided.
        if storage:
            self.metrics.append(CheckPointer(storage=storage))

        self.hyper_parameters = {}
        self.batch_size = None