def _load_model(self, exp_dir, checkpoint_filename='checkpoint.pth.tar'):
    """Restore a trained AutoregressiveRNN agent from an experiment directory.

    Args:
        exp_dir: Experiment directory containing a ``config.json``.
        checkpoint_filename: Name of the checkpoint to restore. Defaults to
            ``'checkpoint.pth.tar'``, preserving the previous hard-coded value.

    Side effects:
        Sets ``self.agent``, ``self.model`` (switched to eval mode), and
        ``self.config``.
    """
    config = load_config(os.path.join(exp_dir, 'config.json'))
    self.agent = AutoregressiveRNN(config)
    self.agent.load_checkpoint(checkpoint_filename)
    self.model = self.agent.model
    # Inference only: disable dropout / batch-norm training behavior.
    self.model.eval()
    self.config = config
def load_exp_data(all_exp_dir):
    """Gather hyperparameter vectors and final validation losses for every
    experiment under ``all_exp_dir``.

    For each experiment subdirectory, reads ``config.json`` into a flat
    parameter vector (tagged with the experiment name) and averages the last
    five recorded ``validation/loss/loss`` scalar values as that experiment's
    loss. Experiments with missing files or a missing metric are skipped with
    a message rather than aborting the whole scan.

    Args:
        all_exp_dir: Directory whose subdirectories are individual experiments.

    Returns:
        Tuple ``(params, losses)`` of two parallel lists.
    """
    params = []
    losses = []
    for exp_name in os.listdir(all_exp_dir):
        try:
            exp_dir = os.path.join(all_exp_dir, exp_name)
            config = load_config(os.path.join(exp_dir, 'config.json'))
            vec = config_to_vec(config)
            vec['exp_name'] = exp_name
            summaries = load_json(
                os.path.join(exp_dir, 'summaries', 'all_scalars.json'))
            k_loss = get_key_for_metric(summaries.keys(),
                                        'validation/loss/loss')
            if k_loss is None:
                # Fixed typo in the skip message ('foud' -> 'found').
                print('Metric not found... skipping')
                continue
            # Each scalar entry is (wall_time, step, value); average the
            # value of the last five entries for a smoothed final loss.
            loss = np.average([x[2] for x in summaries[k_loss][-5:]])
            params.append(vec)
            losses.append(loss)
        except FileNotFoundError:
            # Incomplete experiment directory — skip it, keep scanning.
            print('File not found... skipping')
            continue
    return params, losses
def _load_model(self, exp_dir):
    """Restore a trained FeedforwardNN agent from *exp_dir* and put its
    model into evaluation mode.

    Side effects:
        Sets ``self.agent``, ``self.model`` (eval mode), and ``self.config``.
    """
    cfg = load_config(os.path.join(exp_dir, 'config.json'))
    # NOTE(review): the saved config's GPU device is overridden here with a
    # hard-coded value — presumably to pin evaluation to a specific card;
    # confirm this is still intended.
    cfg['gpu_device'] = 9
    agent = FeedforwardNN(cfg)
    agent.load_checkpoint('checkpoint.pth.tar')
    self.agent = agent
    self.model = agent.model
    # Inference only.
    self.model.eval()
    self.config = cfg
def __init__(self, exp_dir):
    """Set up the evaluation harness: load the experiment config and model,
    build the test split of PyramidImages, and create a shuffled dataloader.
    """
    self.config = load_config(os.path.join(exp_dir, 'config.json'))
    self._load_model(exp_dir)

    input_size = self.config.encoder_kwargs.input_size
    use_knowledge_states = self.config.knowledge_states
    self.test_dataset = PyramidImages(
        None,
        input_size=input_size,
        split='test',
        knowledge_states=use_knowledge_states)

    self.group_lookup = self._group_lookup_array(GROUPING)
    self.dataloader = DataLoader(
        self.test_dataset, batch_size=64, shuffle=True)
    # Label space collapses to 5 classes when knowledge states are used.
    self.n_labels = 5 if use_knowledge_states else 13
def __init__(self, problem, exp_dir, strategy='map'):
    """Load the experiment's config and model, then build the Citizenship
    test split (sharing the training vocabulary) with a one-item dataloader.

    Args:
        problem: Accepted for interface compatibility; not used here.
        exp_dir: Experiment directory containing ``config.json``.
        strategy: Decoding/selection strategy name (default ``'map'``).
    """
    self.config = load_config(os.path.join(exp_dir, 'config.json'))
    self._load_model(exp_dir)
    self.strategy = strategy

    shared_vocab = self.agent.train_dataset.vocab
    self.student_data = CitizenshipLabels(
        13, split='test', vocab=shared_vocab)
    self.dataloader = DataLoader(
        self.student_data, batch_size=1, shuffle=False)