def __call__(self, engine: Engine, batchdata: Dict[str, torch.Tensor]):
    """Run `self.max_interactions` click-simulation passes over `batchdata`,
    then hand the updated batch to the engine's regular iteration.

    On each pass the network is run in inference mode to produce predictions,
    the predictions are written back into the batch, and `self.transforms` is
    applied (presumably to simulate the next user interaction — confirm against
    the transform chain supplied by the caller).

    Raises:
        ValueError: if ``batchdata`` is None.
    """
    if batchdata is None:
        raise ValueError("Must provide batch data for current iteration.")
    for j in range(self.max_interactions):
        # prepare_batch yields (inputs, targets); targets are unused here.
        inputs, _ = engine.prepare_batch(batchdata)
        inputs = inputs.to(engine.state.device)
        # NOTE(review): the network is switched to eval mode for inference but
        # never switched back to train mode before engine._iteration below —
        # confirm that _iteration (or the trainer) restores training mode.
        engine.network.eval()
        with torch.no_grad():
            if engine.amp:
                # Mirror the engine's AMP setting for the inner inference pass.
                with torch.cuda.amp.autocast():
                    predictions = engine.inferer(inputs, engine.network)
            else:
                predictions = engine.inferer(inputs, engine.network)
        # Expose the fresh predictions to the interaction transforms.
        batchdata.update({CommonKeys.PRED: predictions})
        # Probability decays linearly from 1.0 over the interaction steps when
        # training; fixed at 1.0 otherwise. One value per batch element.
        batchdata[self.key_probability] = torch.as_tensor(
            ([1.0 - ((1.0 / self.max_interactions) * j)] if self.train else [1.0]) * len(inputs))
        # Transforms may return a new dict; rebind rather than mutate in place.
        batchdata = self.transforms(batchdata)
    # Delegate the actual optimization/evaluation step to the engine.
    return engine._iteration(engine, batchdata)
def attach(self, engine: Engine) -> None:
    """Register this handler to run at the start of every iteration.

    Args:
        engine: Ignite Engine to attach to.
    """
    event = Events.ITERATION_STARTED
    engine.add_event_handler(event, self)
def attach(self, engine: Engine) -> None:
    """Register this handler for ITERATION_COMPLETED, skipping the
    registration if it is already attached (idempotent).

    Args:
        engine: Ignite Engine to attach to.
    """
    if engine.has_event_handler(self, Events.ITERATION_COMPLETED):
        return
    engine.add_event_handler(Events.ITERATION_COMPLETED, self)
def attach(self, engine: Engine) -> None:
    """Register this handler at two cadences: every ``self.interval``
    iterations (tagged "iteration") and every epoch (tagged "epoch").

    Args:
        engine: Ignite Engine to attach to.
    """
    register = engine.add_event_handler
    register(Events.ITERATION_COMPLETED(every=self.interval), self, "iteration")
    register(Events.EPOCH_COMPLETED(every=1), self, "epoch")
def attach(self, engine: Engine) -> None:
    """Register ``self.iteration_completed`` to fire at the end of each
    epoch, avoiding duplicate registration (idempotent).

    Args:
        engine: Ignite Engine to attach to.
    """
    callback = self.iteration_completed
    if engine.has_event_handler(callback, Events.EPOCH_COMPLETED):
        return
    engine.add_event_handler(Events.EPOCH_COMPLETED, callback)