Example #1
# Imports used by this snippet (truncated in the original; a module-level
# `options` dict is also assumed):
import time

import evaluation
from dataloader import CustomDataloader


def time_dataset(dataset_name):
    print("Timing how long it takes to load dataset {}".format(dataset_name))
    start_time = time.time()
    dataloader = CustomDataloader(dataset=dataset_name, **options)
    batch_count = 0
    for example in dataloader:
        batch_count += 1
    time_taken = time.time() - start_time
    print("Loaded {} examples from {} in {:.3f}s".format(
        dataloader.count(), dataset_name, time_taken))
    print("Image size: {0}x{0} \tTime per image: {1:.4f}s".format(
        dataloader.img_conv.img_shape, time_taken / dataloader.count()))
    return time_taken
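
# Usage sketch (hypothetical dataset paths; `time_dataset` also reads the
# module-level `options` dict when building its CustomDataloader):
#   for name in ['mnist.dataset', 'svhn.dataset']:
#       time_dataset(name)
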
def get_comparison_dataloader(comparison_dataset=None, **options):
    if not comparison_dataset:
        return
    comparison_options = options.copy()
    comparison_options['dataset'] = comparison_dataset
    comparison_options['last_batch'] = True
    comparison_options['shuffle'] = False
    comparison_dataloader = CustomDataloader(**comparison_options)
    return comparison_dataloader
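
# Usage sketch (hypothetical dataset path): returns a deterministic,
# fixed-order dataloader over the comparison dataset, or None when no
# comparison dataset is given.
#   cmp_loader = get_comparison_dataloader(comparison_dataset='svhn.dataset',
#                                          batch_size=64)
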
def evaluate_with_comparison(networks, dataloader, comparison_dataloader=None, **options):
    # comparison_dataloader = get_comparison_dataloader(**options)
    # if comparison_dataloader:
        # options['fold'] = 'openset_{}'.format(comparison_dataloader.dsf.name)
    options['fold'] = 'openset_{}'.format(options['data_dir'].split('/')[-1])
    if options.get('mode'):
        options['fold'] += '_{}'.format(options['mode'])
    if options.get('aux_dataset'):
        aux_dataset = CustomDataloader(options['aux_dataset'])
        options['fold'] = '{}_{}'.format(options.get('fold'), aux_dataset.dsf.count())

    new_results = evaluation.evaluate_classifier(networks, dataloader, comparison_dataloader, **options)

    if comparison_dataloader is not None:
        openset_results = evaluation.evaluate_openset(networks, dataloader, comparison_dataloader, **options)
        new_results[options['fold']].update(openset_results)
    return new_results[options['fold']]
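
# Usage sketch (all names hypothetical): the returned dict holds classifier
# metrics for the derived fold, plus open-set metrics when a comparison
# dataloader is supplied.
#   results = evaluate_with_comparison(networks, eval_loader,
#                                      comparison_dataloader=cmp_loader,
#                                      data_dir='data/mnist')
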
# Other options can change with every run
parser.add_argument('--batch_size', type=int, default=64, help='Batch size [default: 64]')
parser.add_argument('--fold', type=str, default='train', help='Fold [default: train]')
parser.add_argument('--start_epoch', type=int, help='Epoch to start from (defaults to most recent epoch)')
parser.add_argument('--count', type=int, default=1, help='Number of counterfactuals to generate')

options = vars(parser.parse_args())

sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from dataloader import CustomDataloader
import counterfactual
from networks import build_networks
from options import load_options


# TODO: Right now, to edit cf_speed et al, you need to edit params.json

start_epoch = options['start_epoch']
options = load_options(options)
options['epoch'] = start_epoch

dataloader = CustomDataloader(**options)

# Batch size must be large enough to make a square grid visual
options['batch_size'] = dataloader.num_classes + 1

networks = build_networks(dataloader.num_classes, **options)

for i in range(options['count']):
    counterfactual.generate_counterfactual(networks, dataloader, **options)
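
# Invocation sketch (script name hypothetical; the flags are those defined by
# the parser above):
#   python generate_counterfactual.py --count 4 --start_epoch 25
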
options = vars(parser.parse_args())

sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from dataloader import CustomDataloader, FlexibleCustomDataloader
from training import train_classifier
from networks import build_networks, save_networks, get_optimizers
from options import load_options, get_current_epoch
from comparison import evaluate_with_comparison
from evaluation import save_evaluation

options = load_options(options)
dataloader = FlexibleCustomDataloader(fold='train', **options)
networks = build_networks(dataloader.num_classes, **options)
optimizers = get_optimizers(networks, finetune=True, **options)

eval_dataloader = CustomDataloader(last_batch=True,
                                   shuffle=False,
                                   fold='test',
                                   **options)

start_epoch = get_current_epoch(options['result_dir']) + 1
for epoch in range(start_epoch, start_epoch + options['epochs']):
    train_classifier(networks, optimizers, dataloader, epoch=epoch, **options)
    #print(networks['classifier_kplusone'])
    #weights = networks['classifier_kplusone'].fc1.weight
    eval_results = evaluate_with_comparison(networks, eval_dataloader,
                                            **options)
    pprint(eval_results)
    save_evaluation(eval_results, options['result_dir'], epoch)
    save_networks(networks, epoch, options['result_dir'])
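
# Invocation sketch (script name hypothetical; the loop above reads
# options['epochs'] and options['result_dir']):
#   python train_classifier.py --result_dir checkpoints/clf --epochs 30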
Example #6
parser.add_argument('--comparison_dataset',
                    help='Dataset for off-manifold comparison')
options = vars(parser.parse_args())

# Import the rest of the project
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from dataloader import CustomDataloader
from networks import build_networks
from options import load_options, get_current_epoch
from evaluation import evaluate_classifier, evaluate_openset, save_evaluation

options = load_options(options)
if not options.get('epoch'):
    options['epoch'] = get_current_epoch(options['result_dir'])
options['random_horizontal_flip'] = False

dataloader = CustomDataloader(last_batch=True, shuffle=False, **options)

networks = build_networks(dataloader.num_classes, **options)

comparison_dataloader = None
if options['comparison_dataset']:
    comparison_options = options.copy()
    comparison_options['dataset'] = options['comparison_dataset']
    comparison_dataloader = CustomDataloader(last_batch=True,
                                             shuffle=False,
                                             **comparison_options)
    comparison_name = options['comparison_dataset'].split('/')[-1].split('.')[0]
    labels_dir = os.path.join(options['result_dir'], 'labels')
    if os.path.exists(labels_dir):
        label_count = len(os.listdir(labels_dir))
Example #7
# Imports used by this snippet (truncated in the original):
import argparse
import os
import sys
from pprint import pprint

parser = argparse.ArgumentParser()
parser.add_argument('--result_dir', help='Output directory for images and model checkpoints [default: .]', default='.')
parser.add_argument('--epochs', type=int, default=10, help='number of epochs to train for [default: 10]')
parser.add_argument('--aux_dataset', help='Path to aux_dataset file [default: None]')

options = vars(parser.parse_args())

sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from dataloader import CustomDataloader, FlexibleCustomDataloader
from training import train_gan
from networks import build_networks, save_networks, get_optimizers
from options import load_options, get_current_epoch
from counterfactual import generate_counterfactual
from comparison import evaluate_with_comparison

options = load_options(options)
dataloader = FlexibleCustomDataloader(fold='train', **options)
eval_dataloader = CustomDataloader(fold='test', **options)

networks = build_networks(dataloader.num_classes, **options)
optimizers = get_optimizers(networks, **options)

start_epoch = get_current_epoch(options['result_dir']) + 1
for epoch in range(start_epoch, start_epoch + options['epochs']):
    train_gan(networks, optimizers, dataloader, epoch=epoch, **options)
    #generate_counterfactual(networks, dataloader, **options)
    eval_results = evaluate_with_comparison(networks, eval_dataloader, **options)
    pprint(eval_results)
    save_networks(networks, epoch, options['result_dir'])
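
# Invocation sketch (script name hypothetical; the flags are those defined by
# the parser above):
#   python train_gan.py --result_dir checkpoints/gan --epochs 50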
Example #8
# Additional imports used below (truncated in the original; an `options` dict
# parsed from the command line is also assumed):
from pprint import pprint

import examples  # project-local module; run_example_code is called below
from options import load_options, get_current_epoch
from dataloader import CustomDataloader
from networks import build_networks
from evaluation import evaluate_classifier
from locking import acquire_lock, release_lock

print("Loading any available saved options from {}/params.json".format(
    options['result_dir']))
options = load_options(options)

print("Switching to the most recent version of the network saved in {}".format(
    options['result_dir']))
options['epoch'] = get_current_epoch(options['result_dir'])

print("Loading dataset from file {}".format(options['dataset']))
dataloader = CustomDataloader(last_batch=True, shuffle=False, **options)

print("Loading neural network weights...")
nets = build_networks(dataloader.num_classes, **options)

examples.run_example_code(nets, dataloader, **options)

print("Evaluating the accuracy of the classifier on the {} fold".format(
    options['fold']))
new_results = evaluate_classifier(nets, dataloader, verbose=False, **options)

print("Results from evaluate_classifier:")
pprint(new_results)

acquire_lock(options['result_dir'])
try:
    # ... (truncated in the original; presumably the new results are written
    # out while the lock is held)
    pass
finally:
    release_lock(options['result_dir'])
Example #9
# Imports and device setup used by this snippet (truncated in the original):
import argparse

import torch
import torch.optim as optim

import model  # project-local module with the network definitions

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

parser = argparse.ArgumentParser()
parser.add_argument('--dataset', type=str, required=True)
parser.add_argument('--batch_size', type=int, default=64)
parser.add_argument('--lr', type=float, default=2e-4)
parser.add_argument('--epochs', type=int, default=10)
parser.add_argument('--latent_size', type=int, default=16)
parser.add_argument('--start_epoch', type=int, default=0)
parser.add_argument('--lambda_gan', type=float, default=0.1)
parser.add_argument('--huber_scale', type=float, default=1.0)
parser.add_argument('--disc_iters', type=int, default=5)

args = parser.parse_args()

from dataloader import CustomDataloader
loader = CustomDataloader(args.dataset,
                          batch_size=args.batch_size,
                          image_size=64)

print('Building model...')

discriminator = model.Discriminator().to(device)
generator = model.Generator(args.latent_size).to(device)
encoder = model.Encoder(args.latent_size).to(device)

optim_disc = optim.Adam(filter(lambda p: p.requires_grad,
                               discriminator.parameters()),
                        lr=args.lr,
                        betas=(0.0, 0.9))
optim_gen = optim.Adam(generator.parameters(), lr=args.lr, betas=(0.0, 0.9))
optim_gen_gan = optim.Adam(generator.parameters(),
                           lr=args.lr,
                           betas=(0.0, 0.9))  # completed to match the optimizers above (call truncated in the original)

# Import the rest of the project
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from dataloader import CustomDataloader
from networks import build_networks
from options import load_options, get_current_epoch
from evaluation import save_evaluation
from comparison import evaluate_with_comparison

options = load_options(options)
if not options.get('epoch'):
    options['epoch'] = get_current_epoch(options['result_dir'])
# TODO: Globally disable dataset augmentation during evaluation
options['random_horizontal_flip'] = False

dataloader = CustomDataloader(last_batch=True, shuffle=False, **options)

# TODO: structure options in a way that doesn't require this sort of hack
train_dataloader_options = options.copy()
train_dataloader_options['fold'] = 'train'
dataloader_train = CustomDataloader(last_batch=True,
                                    shuffle=False,
                                    **train_dataloader_options)

networks = build_networks(dataloader.num_classes, **options)

new_results = evaluate_with_comparison(networks,
                                       dataloader,
                                       dataloader_train=dataloader_train,
                                       **options)
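
# The original snippet ends here; a natural next step, using save_evaluation
# as imported above (signature as used in the training loop earlier):
#   save_evaluation(new_results, options['result_dir'], options['epoch'])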
Example #11
parser.add_argument('--lr', type=float, default=2e-4)
parser.add_argument('--save_to_dir', type=str, default='checkpoints')
parser.add_argument('--load_from_dir', type=str, default='checkpoints')
parser.add_argument('--epochs', type=int, default=10)
parser.add_argument('--latent_size', type=int, default=16)
parser.add_argument('--start_epoch', type=int, default=0)
parser.add_argument('--lambda_gan', type=float, default=0.1)
parser.add_argument('--dataset', type=str, required=True)

args = parser.parse_args()

from dataloader import CustomDataloader
from converter import SkyRTSConverter

loader = CustomDataloader(args.dataset,
                          batch_size=args.batch_size,
                          img_format=SkyRTSConverter)
test_loader = CustomDataloader(args.dataset,
                               batch_size=args.batch_size,
                               img_format=SkyRTSConverter,
                               fold='test')

print('Building model...')

discriminator = model.Discriminator().to(device)
generator = model.Generator(args.latent_size).to(device)
encoder = model.Encoder(args.latent_size).to(device)
value_estimator = model.ValueEstimator(args.latent_size).to(device)
predictor = model.Predictor(args.latent_size).to(device)

if args.start_epoch:
    # ... (truncated in the original; presumably saved weights are restored
    # from args.load_from_dir here)
    pass