def get_accumulator_stats(self, format="short", split=None):
    """
    Gather tracked stats into a dictionary as formatted strings
    """
    if not self._accumulator_len:
        return {}
    stats = AbstractLoss.get_accumulator_stats(self, format=format, split=split)
    if format == "long":
        # tensorboard logs
        if self.config_loss.get("l2_reg"):
            l2_reg = self.l2_reg().detach().to("cpu").numpy()
            stats["general/l2_reg"] = {
                "value": l2_reg,
                "string": f"{l2_reg:.4}",
            }
        # merge in the stats tracked by each sub-loss, failing on key collisions
        for loss in self.losses:
            substats = loss["callable"].get_accumulator_stats(
                format=format,
                split=split,
            )
            misc.clean_update(stats, substats)
    return stats

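# For reference, a minimal sketch of what misc.clean_update is assumed to do
# throughout this file: merge `new` into `base` while raising on key
# collisions, so stats from different sub-losses cannot silently overwrite
# each other. This is inferred from usage, not the actual mixmo.utils.misc
# implementation, which may differ (e.g. it also exposes an overwriting
# "dirty" update used in create_templates below).
def clean_update(base, new):
    """Merge `new` into `base`, refusing to overwrite existing keys."""
    for key, value in new.items():
        if key in base:
            raise ValueError(f"Duplicate key: {key}")
        base[key] = value
    return base
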
def get_report(self, print_flag=False):
    """
    Aggregate the diversity (ratio-error) and per-member accuracy reports
    into a single ordered dict
    """
    stats = OrderedDict({})
    misc.clean_update(
        stats, self.get_diversity_ratioerrors(print_flag=print_flag))
    misc.clean_update(
        stats, self.get_individualaccuracies(print_flag=print_flag))
    return stats

def output_format(self, std_batch):
    """
    Transforms standard batches into batches of sample summaries
    """
    # Draw M shuffled index permutations, one per ensemble member
    batch_size = len(std_batch)
    list_shuffled_index = [
        torchutils.randperm_static(
            batch_size, proba_static=self._proba_input_repetition)
        for _ in range(self.num_members)
    ]
    # member 0 defines the base ordering of the batch
    shuffled_batch = [
        std_batch[list_shuffled_index[0][count]] for count in range(batch_size)
    ]

    # sample batch seed, shared among samples from the given batch
    batch_seed = random.randint(0, config.cfg.RANDOM.MAX_RANDOM)
    # each sample summary records the shared seed and, for every member,
    # the index of the sample that member should receive
    list_index = [
        misc.clean_update(
            {
                "batch_seed": batch_seed,
                "index_" + str(0): shuffled_batch[count],
            },
            {
                "index_" + str(num_member):
                shuffled_batch[list_shuffled_index[num_member][count]]
                for num_member in range(1, self.num_members)
            })
        for count in range(batch_size)
    ]
    return list_index

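# A minimal sketch (an assumption, not the repo's torchutils implementation)
# of randperm_static: a random permutation of range(batch_size) in which each
# index keeps its original position with probability proba_static. Under this
# reading, proba_static controls how often a member sees the same sample as
# the base ordering, i.e. the input repetition probability.
import torch

def randperm_static(batch_size, proba_static=0.0):
    perm = torch.arange(batch_size)
    # indices allowed to move; the rest stay fixed ("static")
    moving = (torch.rand(batch_size) >= proba_static).nonzero(as_tuple=True)[0]
    # permute the moving indices among themselves, keeping a valid permutation
    perm[moving] = moving[torch.randperm(len(moving))]
    return perm
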
def create_templates(template_path, config_dir, dataset):
    if os.path.exists(config_dir):
        LOGGER.debug("Config folder already exists: deleting it before regeneration")
        rmtree(config_dir)
    os.mkdir(config_dir)
    template_output_path = os.path.join(
        config_dir,
        "exp_{dataset}_{networktype}_{mixmoparams}_{dataaugparams}_{scheduling}.yaml"
    )
    for dict_template in DICT_DATASET_CONFIG[dataset]["templates"]:
        params = copy.deepcopy(DICT_DATASET_CONFIG[dataset]["shared_config"])
        params.update(DICT_NETWORK[dict_template["networktype"]])
        save_params = copy.deepcopy(params)
        for trainingfiltering in dict_template["trainingfiltering"]:
            for imixmo in trainingfiltering["mixmoparams"]:
                for idataaug in trainingfiltering["dataaugparams"]:
                    for ische in trainingfiltering["scheduling"]:
                        # layer defaults then the selected variants on top of
                        # the shared parameters
                        misc.clean_update(
                            params, DICT_CONFIG["mixmoparams"]["_default"])
                        misc.update(
                            params, DICT_CONFIG["mixmoparams"][imixmo],
                            method="dirty")
                        misc.clean_update(
                            params, DICT_CONFIG["dataaugparams"]["_default"])
                        misc.update(
                            params, DICT_CONFIG["dataaugparams"][idataaug],
                            method="dirty")
                        misc.clean_update(
                            params,
                            DICT_CONFIG["scheduling"][params['dataset_name']]["_default"])
                        misc.clean_update(
                            params,
                            DICT_CONFIG["scheduling"][params['dataset_name']][ische])

                        # templating
                        output_path = template_output_path.format(
                            dataset=dataset,
                            networktype=dict_template["networktype"],
                            scheduling=ische,
                            mixmoparams=imixmo,
                            dataaugparams=idataaug,
                        )
                        if os.path.exists(output_path):
                            raise ValueError(f"Config already exists: {output_path}")
                        output_path = use_template(
                            template_path=template_path,
                            output_path=output_path,
                            params=params,
                        )
                        # reset to the shared parameters for the next variant
                        params = copy.deepcopy(save_params)

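# A minimal sketch of the use_template helper called above (hypothetical; the
# real scripts-side helper may behave differently): read the template file,
# substitute {placeholder} fields from params, and write the rendered config
# to output_path.
def use_template(template_path, output_path, params):
    with open(template_path, "r") as f_template:
        content = f_template.read()
    with open(output_path, "w") as f_output:
        f_output.write(content.format(**params))
    return output_path
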
def get_dict_to_scores(self, split):
    """
    Format logs into a dictionary
    """
    logs_dict = OrderedDict({})
    if split == "train":
        lr_value = self.optimizer.param_groups[0]["lr"]
        logs_dict[f"general/{self.name}_lr"] = {
            "value": lr_value,
            "string": f"{lr_value:05.5}",
        }
    misc.clean_update(
        logs_dict,
        self.loss.get_accumulator_stats(format="long", split=split))
    if self.mode == "eval":
        LOGGER.info(f"Compute metrics for {self.name} at split: {split}")
        scores = self._metrics.get_scores(split=split)
        for s in scores:
            logs_dict[s] = scores[s]
    return logs_dict

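# Illustration (hypothetical values) of the per-key entry format built above:
# each logged scalar carries both a raw value and a pre-formatted display
# string, here a zero-padded general format with 5 significant digits.
lr_value = 0.1
entry = {"value": lr_value, "string": f"{lr_value:05.5}"}
assert entry["string"] == "000.1"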