def load(self, path):
    """Restore previously saved weights from *path* into this model.

    Args:
        path: a pathlib.Path pointing at a torch checkpoint file.

    Returns:
        self, so the call can be chained fluently.
    """
    with path.open('rb') as checkpoint:
        state = torch.load(checkpoint)
    # Copy only the matching entries into this model's parameters.
    load_model_from_dict(self, state)
    return self
def load(self, path):
    """Load saved weights into the wrapped aligner (``self.align``).

    Reads a torch checkpoint from *path* (a pathlib.Path) and copies the
    matching parameters into ``self.align`` via ``load_model_from_dict``.

    Args:
        path: a pathlib.Path pointing at a torch checkpoint file.

    Returns:
        self, so the call can be chained fluently.
    """
    with path.open('rb') as f:
        weights = torch.load(f)
    # NOTE(review): removed commented-out key-dump debug code that was
    # left behind here; re-add a proper logging call if key inspection
    # is still needed.
    load_model_from_dict(self.align, weights)
    return self
def load(archive_path=None, height=5, dim=1536, skips=0, topskips=0, k=7,
         cuda=True, num_targets=1):
    """Build a PyramidTransformer and load saved weights from an archive.

    Params:
        archive_path: path to a torch checkpoint to restore; required.
        height: the number of layers in the pyramid (including bottom
            layer); number of downsamples = height - 1.
        dim: the size of the full resolution images used as input.
        skips: the number of residual fields (from the bottom of the
            pyramid) to skip.
        topskips: the number of fields skipped from the top of the
            pyramid.  # assumes same semantics as `skips` — TODO confirm
        k: kernel-size parameter forwarded to PyramidTransformer.
        cuda: whether or not to move the model to the GPU.
        num_targets: number of targets forwarded to PyramidTransformer.

    Returns:
        The constructed model in eval mode with gradients disabled.

    Raises:
        ValueError: if archive_path is not provided.
    """
    # `assert` is stripped under `python -O`, so validate explicitly.
    if archive_path is None:
        raise ValueError("Must provide an archive")
    model = PyramidTransformer(size=height, dim=dim, k=k, skip=skips,
                               topskips=topskips, num_targets=num_targets)
    if cuda:
        model = model.cuda()
    # Inference-only: freeze all parameters and switch to eval mode.
    for p in model.parameters():
        p.requires_grad = False
    model.train(False)
    print('Loading model state from {}...'.format(archive_path))
    state_dict = torch.load(archive_path)
    load_model_from_dict(model, state_dict)
    print('Successfully loaded model state.')
    return model