    # Build a class-balanced subset of the dataset: examples_per_class samples
    # per class, drawn with the per-configuration epc_seed.
    subset_dataset = get_subset_dataset(full_dataset=full_dataset,
                                        examples_per_class=args.examples_per_class,
                                        epc_seed=row.epc_seed,
                                        root=osp.join(args.dataset_root, args.dataset),
                                        train=True,
                                        transform=transform,
                                        download=True
                                        )

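    # Iterate the subset in fixed-size batches; keep the final partial batch
    # (drop_last=False) so every example contributes.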
    loader = DataLoader(dataset=subset_dataset,
                        drop_last=False,
                        batch_size=args.batch_size)

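    # Estimate the spectrum of the loss Hessian restricted to each parameter
    # tensor of the network in turn.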
    C = row.num_classes
    logger.info('Starting layerwise hessian decomposition...')
    for layer_index, (layer_name, _) in enumerate(model.named_parameters()):
        logger.info('layer_index: {}\nlayer_name: {}'.format(layer_index, layer_name))
        logger.info('decomposing hessian')
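        # Layer-restricted Hessian of the cross-entropy loss; the poly_* and
        # SSI_* arguments presumably control the degree and resolution of the
        # spectrum approximation.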
        Hess = LayerHessian(crit='CrossEntropyLoss',
                            loader=loader,
                            device=device,
                            model=model,
                            num_classes=C,
                            layer_name=layer_name,
                            hessian_type='Hessian',
                            init_poly_deg=64,
                            poly_deg=128,
                            spectrum_margin=0.05,
                            poly_points=1024,
                            SSI_iters=128
                            )
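        # The layerwise decomposition is presumably computed and stored from
        # Hess at this point in the loop.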

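    # Per-layer spectrum bounds (presumably the upper and lower edges of each
    # layer's Hessian spectrum).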
    ub = list()
    lb = list()

    for layer_name, param in model.named_parameters():
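        # Keep only weight tensors; skip biases and batch-norm parameters.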
        if 'weight' not in layer_name:
            continue
        if '.bn' in layer_name:
            continue
        logger.info('layer_name: {}'.format(layer_name))
        Hess = LayerHessian(crit='CrossEntropyLoss',
                            loader=loader,
                            device=device,
                            model=model,
                            num_classes=C,
                            layer_name=layer_name,
                            hessian_type='Hessian',
                            init_poly_deg=64,
                            poly_deg=128,
                            spectrum_margin=0.05,
                            poly_points=1024,
                            SSI_iters=128
                            )
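        # The bounds for this layer are presumably derived from Hess and
        # appended to ub/lb.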

    # Fetch the trained checkpoint and the results CSV describing the run.
    model_weights_path, results_path = download_model(model_url, ckpt_dir)
    df = pd.read_csv(results_path)
    row = df.iloc[0]

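    # Rebuild the architecture with the hyperparameters recorded in the
    # results row.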
    if args.model in [
            'VGG11_bn', 'Resnet18', 'DenseNet3_40', 'LeNet', 'MobileNet'
    ]:
        model = Network().construct(args.model, row)
    else:
        raise Exception('Unknown model argument: {}'.format(args.model))

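    # Optionally dump the parameter count and layer names, then exit.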
    if args.print_layers:
        total_params = sum(p.numel() for p in model.parameters()
                           if p.requires_grad)
        print('Total number of parameters: ', total_params, '\nlayers:')
        pp.pprint(list(name for (name, _) in model.named_parameters()))
        exit(code=0)

    state_dict = torch.load(model_weights_path,
                            map_location=lambda storage, loc: storage)
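    # Newer checkpoints presumably nest the weights under a 'model' key.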
    if args.new:
        state_dict = state_dict['model']
    model.load_state_dict(state_dict, strict=True)
    model = model.to(device)

    model = model.eval()

    mean, std = get_mean_std(args.dataset)
    pad = int((row.padded_im_size - row.im_size) / 2)
    transform = transforms.Compose([
        transforms.Pad(pad),
        transforms.ToTensor(),
        # Normalize with the dataset statistics computed above.
        transforms.Normalize(mean, std)
    ])
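
    # Download the checkpoint and results CSV, rebuild the network, and load
    # the weights; wrap the model in DataParallel when several GPUs are
    # visible and no specific device was requested.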
    model_weights_path, results_path = download_model(model_url, ckpt_dir)
    df = pd.read_csv(results_path)
    row = df.iloc[0]

    if args.model in [
            'VGG11_bn', 'ResNet18', 'DenseNet3_40', 'MobileNet', 'LeNet'
    ]:
        model = Network().construct(args.model, row)
    else:
        raise Exception('Unknown model argument: {}'.format(args.model))

    if args.print_layers:
        total_params = sum(p.numel() for p in model.parameters()
                           if p.requires_grad)
        print('Total number of parameters: ', total_params, '\nlayers:')
        pp.pprint(list(name for (name, _) in model.named_parameters()))
        exit(code=0)

    state_dict = torch.load(model_weights_path,
                            map_location=lambda storage, loc: storage)
    model.load_state_dict(state_dict, strict=True)
    model = model.to(device)
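    # No specific GPU requested: spread the model across all visible GPUs.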
    if args.cuda is None:
        gpus = torch.cuda.device_count()
        if gpus > 1:
            model = nn.DataParallel(model, device_ids=range(gpus))
    model = model.eval()

    mean, std = get_mean_std(args.dataset)
    pad = int((row.padded_im_size - row.im_size) / 2)
    transform = transforms.Compose([
        transforms.Pad(pad),
        transforms.ToTensor(),
        transforms.Normalize(mean, std)
    ])