args = parser.parse_args()
print(args)

args.cuda = args.use_cuda
cudnn.benchmark = True  # Should make training go faster for large models

torch.manual_seed(args.seed)
if args.cuda:
    torch.cuda.manual_seed(args.seed)

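# optionally draw the GutOut threshold from a Gaussian with mean mu and
# standard deviation sigma instead of using a fixed value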
if args.random_threshold:
    args.threshold = random.gauss(float(args.mu), float(args.sigma))
    print("Using threshold ", args.threshold)

# get dataloaders
train_loader, test_loader = get_dataloaders(args)
if args.dataset == "cifar10":
    num_classes = 10
elif args.dataset == "cifar100":
    num_classes = 100
else:
    raise ValueError(f"Unsupported dataset: {args.dataset}")

# create models
if args.model == "resnet18":
    model_a = resnet18(num_classes=num_classes)
    model_b = resnet18(num_classes=num_classes)
else:
    raise ValueError(f"Unsupported model: {args.model}")

# create optimizer, loss function and scheduler
optimizer_a = torch.optim.SGD(
    model_a.parameters(),
    lr=args.learning_rate,
    momentum=0.9,
)
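
# NOTE: the original example is truncated at this point. Below is a minimal
# sketch of how the remaining setup could look; the second optimizer, the
# criterion, and the scheduler milestones/gamma are assumptions, not taken
# from the source.
optimizer_b = torch.optim.SGD(
    model_b.parameters(),
    lr=args.learning_rate,
    momentum=0.9,
)
criterion = torch.nn.CrossEntropyLoss()
scheduler_a = torch.optim.lr_scheduler.MultiStepLR(
    optimizer_a, milestones=[60, 120, 160], gamma=0.2)
scheduler_b = torch.optim.lr_scheduler.MultiStepLR(
    optimizer_b, milestones=[60, 120, 160], gamma=0.2)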
Example #2
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), "../../"))

from src.gutout.gutout_utils import BatchGradCam
from src.utils.data_utils import get_dataloaders
from src.utils.misc import CSVLogger
from src.training.training_utils import (get_args, get_optimizer_and_schedular,
                                         get_model, create_experiment_dir,
                                         run_epoch, train, test)

if __name__ == "__main__":

    # parse arguments
    args, max_num_batches = get_args(hypterparameters_tune=True)
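    # max_num_batches presumably caps how many batches each tuning run sees so
    # that hyperparameter sweeps stay short (assumption; see get_args)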

    # create train, validation and test dataloaders
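    # (need_validate=True also returns a validation loader; validate_proportion
    # controls the train/validation split ratio, see get_dataloaders)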
    train_loader, valid_loader, test_loader = get_dataloaders(
        args, need_validate=True, validate_proportion=0.8)
    # create model and optimizer
    # model = get_model(args, weights_path=args.model_a_path)

    # create experiment dir, csv logger and criterion
    experiment_dir, experiment_id = create_experiment_dir(args)
    csv_filename = os.path.join(experiment_dir,
                                f"HP_tune_{experiment_id}.csv")
    csv_logger = CSVLogger(
        args=args,
        fieldnames=[
            "decision",
            "mu",
            "sigma",
            "threshold",
            "epoch",