    params.cuda = False  # hard-coded to CPU; set to torch.cuda.is_available() to use a GPU when present

    # Set the random seed for reproducible experiments
    random.seed(230)
    torch.manual_seed(230)
    if params.cuda:
        torch.cuda.manual_seed(230)
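    # (note: torch.cuda.manual_seed seeds only the current device; for
    # multi-GPU runs torch.cuda.manual_seed_all(230) covers every device, and
    # full determinism also needs torch.backends.cudnn.deterministic = True)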

    # Set the logger
    utils.set_logger(os.path.join(args.model_dir, 'train.log'))

    # Create the input data pipeline
    logging.info("Loading the datasets...")

    # fetch dataloaders, considering full-set vs. sub-set scenarios
    if params.subset_percent < 1.0:
        train_dl = data_loader.fetch_subset_dataloader('train', params)
    else:
        train_dl, _ = data_loader.fetch_dataloader('train', params)
    
    dev_dl, _ = data_loader.fetch_dataloader('dev', params)
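    # a subset loader is typically built with a sampler; a minimal sketch,
    # assuming a plain torch.utils.data Dataset named trainset (names here are
    # assumptions for illustration, not this repo's API):
    #   n = int(params.subset_percent * len(trainset))
    #   sampler = torch.utils.data.SubsetRandomSampler(torch.randperm(len(trainset))[:n])
    #   loader = torch.utils.data.DataLoader(trainset, batch_size=params.batch_size, sampler=sampler)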

    logging.info("- done.")

    """Based on the model_version, determine model/optimizer and KD training mode
       WideResNet and DenseNet were trained on multi-GPU; need to specify a dummy
       nn.DataParallel module to correctly load the model parameters
    """
    if "distill" in params.model_version:
        student_model_load_start = time.time()
        # train a 5-layer CNN or an 18-layer ResNet with knowledge distillation
        if params.model_version == "cnn_distill":
Example #2
def fetch_subset_data(datadir, params):
    """ fetch the subset dataloaders """
    num = 10
    dl = data_loader.fetch_subset_dataloader(['train', 'test'], datadir,
                                             params, num)
    return dl, num
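# usage sketch (the data directory and params are placeholders); the returned
# dict is keyed by split name, as in Example #4 below:
#   dl, num_batches = fetch_subset_data('data/CIFAR10', params)
#   train_dl, test_dl = dl['train'], dl['test']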
Example #3
    params.cuda = torch.cuda.is_available()

    # Set the random seed for reproducible experiments
    random.seed(230)
    torch.manual_seed(230)
    if params.cuda:
        torch.cuda.manual_seed(230)

    # Set the logger
    utils.set_logger(os.path.join(args.model_dir, 'train.log'))

    # Create the input data pipeline
    logging.info("Loading the datasets...")

    # fetch dataloaders, considering full-set vs. sub-set scenarios
    if params.subset_percent < 1.0:
        train_dl = data_loader.fetch_subset_dataloader('train', params)
    else:
        train_dl = data_loader.fetch_dataloader('train', params)
    
    dev_dl = data_loader.fetch_dataloader('dev', params)

    logging.info("- done.")

    """Based on the model_version, determine model/optimizer and KD training mode
       WideResNet and DenseNet were trained on multi-GPU; need to specify a dummy
       nn.DataParallel module to correctly load the model parameters
    """
    if "distill" in params.model_version:

        # train a 5-layer CNN or an 18-layer ResNet with knowledge distillation
        if params.model_version == "cnn_distill":
Example #4
    torch.manual_seed(200)
    if params.cuda:
        torch.cuda.manual_seed(200)

    # set the logger
    utils.set_logger(os.path.join(args.model_dir, 'train.log'))

    # create the input data pipeline
    logging.info('Loading datasets...')

    # fetch the data loaders; in test mode, fetch only 10 batches per split
    if args.run_mode == 'test':
        data_loaders = data_loader.fetch_subset_dataloader(['train', 'test'],
                                                           args.data_dir,
                                                           params, 10)
    else:
        data_loaders = data_loader.fetch_dataloader(['train', 'test'],
                                                    args.data_dir, params)

    train_dl = data_loaders['train']
    test_dl = data_loaders['test']

    logging.info('- done.')

    # define the model
    myModel = net.Net(params).cuda() if params.cuda else net.Net(params)
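    # equivalently, via the device API:
    #   myModel = net.Net(params).to('cuda' if params.cuda else 'cpu')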

    # define the optimizer