Example #1
    def init(self, optimizer=None, model=None, uid=None):
        """
        Parameters
        ----------
        optimizer: Dict
            Optimizer hyper parameters

        model: Dict
            model hyper parameters

        uid: Optional[str]
            trial id to use for logging.
            When using Orion, a trial has usually already been created for us; we only need to append to it.
        """
        optimizer = select(optimizer, {})
        model = select(model, {})

        self.model.init(**model)

        # list of all parameters this task has
        parameters = list()
        parameters.append({'params': self.model.parameters()})
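        # NOTE: this list is not handed to the optimizer; optimizer.init below
        # receives the model parameters directly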

        # We need to set the device now so the optimizer receives cuda tensors
        self.set_device(self.device)
        self.optimizer.init(self.model.parameters(), **optimizer)

        self.hyper_parameters = {'optimizer': optimizer, 'model': model}

        # Get all hyper parameters, even the ones that were set manually
        hyperparameters = self.get_current_space()

        # Trial Creation and Trial resume
        self.metrics.new_trial(hyperparameters, uid)
        self.set_device(self.device)
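
The `select` helper used throughout these examples is not shown here; a minimal sketch of what it most likely does (an assumption, not the library's actual implementation) is a None-coalescing default:

    def select(value, default):
        # fall back to the default only when the caller passed nothing
        return default if value is None else value

    optimizer = select(None, {})           # -> {}
    optimizer = select({'lr': 0.01}, {})   # -> {'lr': 0.01}

With that helper, passing `optimizer=None` or `model=None` to `init()` simply means "use an empty hyper parameter dict".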
Example #2
    def init(self,
             gamma=0.99,
             optimizer=None,
             lr_schedule=None,
             model=None,
             uid=None):
        """
        Parameters
        ----------
        optimizer: Dict
            Optimizer hyper parameters

        lr_schedule: Dict
            lr schedule hyper parameters

        model: Dict
            model hyper parameters

        gamma: float
            reward discount factor

        uid: Optional[str]
            trial id to use for logging.
            When using Orion, a trial has usually already been created for us; we only need to append to it.
        """

        optimizer = select(optimizer, {})
        lr_schedule = select(lr_schedule, {})
        model = select(model, {})
        self.gamma = gamma

        self.actor_critic.init(**model)

        # We need to set the device now so the optimizer receives cuda tensors
        self.set_device(self.device)
        self.optimizer.init(self.actor_critic.parameters(),
                            override=True,
                            **optimizer)
        self.lr_scheduler.init(self.optimizer, override=True, **lr_schedule)

        self.hyper_parameters = {
            'optimizer': optimizer,
            'lr_schedule': lr_schedule,
            'model': model
        }

        parameters = {}
        parameters.update(optimizer)
        parameters.update(lr_schedule)
        parameters.update(model)

        self.metrics.new_trial(parameters, uid)
        self.set_device(self.device)
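
Before logging, this example flattens every hyper parameter group into a single dict for `metrics.new_trial()`. A small standalone sketch of that pattern, with purely illustrative key names:

    # Illustrative hyper parameter groups; the keys are made up for the example.
    optimizer = {'lr': 1e-3, 'momentum': 0.9}
    lr_schedule = {'gamma': 0.97}
    model = {'hidden_size': 64}

    parameters = {}
    parameters.update(optimizer)
    parameters.update(lr_schedule)
    parameters.update(model)

    # parameters is now one flat dict, which is what new_trial() receives
    print(parameters)

Note that because all groups are merged into one namespace, identically named keys in different groups would silently overwrite each other.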
Example #3
    def __init__(self, classifier, optimizer, lr_scheduler, dataloader, criterion=None, device=None,
                 storage=None, preprocessor=None, metrics=None):
        super(Classification, self).__init__(device=device)
        criterion = select(criterion, CrossEntropyLoss())

        self._first_epoch = 0
        self.current_epoch = 0
        self.classifier = classifier
        self.optimizer = optimizer
        self.lr_scheduler = lr_scheduler
        self.dataloader = dataloader
        self.criterion = criterion
        self.preprocessor = Preprocessor()
        # ------------------------------------------------------------------

        self.metrics.append(ElapsedRealTime().every(batch=1))
        self.metrics.append(SampleCount().every(batch=1, epoch=1))
        self.metrics.append(OnlineTrainAccuracy())
        self.metrics.append(Speed())

        # All metrics must be before ProgressView and CheckPointer
        if metrics:
            for metric in metrics:
                self.metrics.append(metric)

        self.metrics.append(ProgressView(self.metrics.get('Speed')))

        if storage:
            self.metrics.append(CheckPointer(storage=storage))
        # ------------------------------------------------------------------

        if preprocessor is not None:
            self.preprocessor = preprocessor

        self.hyper_parameters = {}
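
A hypothetical way to wire this constructor together, assuming standard PyTorch components and that the `Classification` class and its metric helpers (`ElapsedRealTime`, `ProgressView`, etc.) are importable from the library shown above; the model, data, and settings are illustrative only:

    import torch
    from torch.nn import CrossEntropyLoss
    from torch.utils.data import DataLoader, TensorDataset

    model = torch.nn.Linear(784, 10)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)
    loader = DataLoader(
        TensorDataset(torch.randn(8, 784), torch.randint(0, 10, (8,))),
        batch_size=4)

    task = Classification(
        classifier=model,
        optimizer=optimizer,
        lr_scheduler=scheduler,
        dataloader=loader,
        criterion=CrossEntropyLoss(),  # same default the constructor picks when criterion is None
    )

If `metrics` or `storage` are passed, the constructor appends them in order, keeping user metrics ahead of `ProgressView` and `CheckPointer` as the comment above requires.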
Example #4
    def init(self, optimizer=None, lr_schedule=None, model=None, uid=None):
        optimizer = select(optimizer, {})
        lr_schedule = select(lr_schedule, {})
        model = select(model, {})

        self.detector.init(**model)

        self.set_device(self.device)
        self.optimizer.init(self.detector.parameters(),
                            override=True,
                            **optimizer)
        self.lr_scheduler.init(self.optimizer, override=True, **lr_schedule)

        parameters = {}
        parameters.update(optimizer)
        parameters.update(lr_schedule)
        parameters.update(model)

        # Trial Creation and Trial resume
        self.metrics.new_trial(parameters, uid)
        self.set_device(self.device)
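
Every example calls `set_device()` before `optimizer.init()` so the optimizer is built over CUDA tensors. A minimal plain-PyTorch illustration of why that ordering matters (this is the underlying idea, not this library's API):

    import torch

    model = torch.nn.Linear(4, 2)
    if torch.cuda.is_available():
        model = model.cuda()   # move the parameters to the GPU first ...

    # ... then build the optimizer over the already-moved parameters
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    # Had the optimizer been created before the move, its internal state
    # (e.g. momentum buffers) could be allocated on the CPU while the
    # parameters live on the GPU, leading to device-mismatch errors.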