Example #1
"""
Custom Dataloaders for each of the considered datasets
"""

import os

from torchvision import datasets

from mixmo.augmentations.standard_augmentations import get_default_composed_augmentations
from mixmo.loaders import cifar_dataset, abstract_loader
from mixmo.utils.logger import get_logger

LOGGER = get_logger(__name__, level="DEBUG")


class CIFAR10Loader(abstract_loader.AbstractDataLoader):
    """
    Loader for the CIFAR10 dataset that inherits the abstract_loader.AbstractDataLoader dataloading API
    and defines the proper augmentations and datasets
    """
    def _init_dataaugmentations(self):
        """Build the default CIFAR augmentation pipelines.

        Stores the train-time pipeline on ``self.augmentations_train`` and
        the test-time pipeline on ``self.augmentations_test``.
        """
        train_aug, test_aug = get_default_composed_augmentations(
            dataset_name="cifar")
        self.augmentations_train = train_aug
        self.augmentations_test = test_aug

    def _init_dataset(self, corruptions=False):
        """Instantiate the CIFAR10 training dataset.

        Args:
            corruptions: not referenced in the visible code -- presumably
                toggles a corrupted evaluation set (e.g. CIFAR10-C) further
                down in this method; TODO confirm against the full file.
        """
        # Downloads CIFAR10 into self.data_dir if it is not already there and
        # applies the train-time pipeline built by _init_dataaugmentations.
        self.train_dataset = cifar_dataset.CustomCIFAR10(
            root=self.data_dir,
            train=True,
            download=True,
            transform=self.augmentations_train)
Example #2
"""
Wrapper functions for metric tracking
Mostly taken from https://github.com/bayesgroup/pytorch-ensembles/blob/master/metrics.py
"""

import numpy as np
from sklearn.metrics import roc_auc_score

from mixmo.utils import visualize
from mixmo.utils.logger import get_logger
from mixmo.core import metrics_ensemble

LOGGER = get_logger(__name__, level="INFO")



def merge_scores(scores_test, scores_val):
    """
    Aggregate scores
    """
    scores_valtest = {}
    for key in scores_test:
        key_valtest = "final/" + key.split("/")[1]
        if key.startswith("test/"):
            keyval = "val/" + key.split("/")[1]
            value = 0.5 * (scores_test[key]["value"] + scores_val[keyval]["value"])
            if scores_test[key]["string"].endswith("%"):
                value_str = f"{value:05.2%}"
            else:
                value_str = f"{value:.6}"
            stats = {"value": value, "string": value_str}