            # use GPU if available
            params.cuda = torch.cuda.is_available()

            # Set the random seed for reproducible experiments
            torch.manual_seed(230)
            if params.cuda: torch.cuda.manual_seed(230)

            # Get the logger
            utils.set_logger(os.path.join(dir_path, 'evaluate.log'))

            # Create the input data pipeline
            # logging.info("Creating the dataset...")

            # fetch dataloaders
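            # the '__'-separated directory name encodes the dataset (field 3) and the model (field 1)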
            test_dl = data_loader.fetch_test_dataloader(dir.split('__')[3], args.data_dir, params)

            # logging.info("- done.")

            # Define the model and optimizer
            models = {
                'linear': LinearRegression().cuda() if params.cuda else LinearRegression(),
                'mlp': MLP().cuda() if params.cuda else MLP(),
                'cnn': CNN().cuda() if params.cuda else CNN()
            }
            model = models[dir.split('__')[1]]

            # fetch loss function and metrics
            losses = {
                'crossentropy': CrossEntropyWithL1Loss(params.l1_reg),
                'hinge': MultiMarginWithL1Loss(params.l1_reg),
Example #2
    # use GPU if available
    params.cuda = torch.cuda.is_available()

    # Set the random seed for reproducible experiments
    torch.manual_seed(230)
    if params.cuda: torch.cuda.manual_seed(230)

    # Get the logger
    utils.set_logger(os.path.join(args.model_dir, 'evaluate.log'))

    # Create the input data pipeline
    logging.info("Creating the dataset...")

    # fetch dataloaders
    test_dl = data_loader.fetch_test_dataloader(args.dataset, args.data_dir,
                                                params)
    train_dl, val_dl = data_loader.fetch_train_dataloaders(
        args.dataset, args.data_dir, params)

    logging.info("- done.")

    # Define the model and optimizer
    models = {
        'linear': LinearRegression().cuda() if params.cuda else LinearRegression(),
        'mlp': MLP().cuda() if params.cuda else MLP(),
        'cnn': CNN().cuda() if params.cuda else CNN()
    }
    model = models[args.model]

    # fetch loss function and metrics
Example #3
                model_dir=exp_dir,
                data_dir='data')

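            # echo the command, then run it; check_call raises if it exits with a non-zero status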
            print(cmd)
            check_call(cmd, shell=True)

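            # reload the hyperparameters saved for this experiment and fix the random seeds for reproducibility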
            params = utils.Hyperparameters(os.path.join(exp_dir, 'params.json'))
            params.cuda = torch.cuda.is_available()
            torch.manual_seed(230)
            if params.cuda:
                torch.cuda.manual_seed(230)
            utils.set_logger(os.path.join(exp_dir, 'lg.log'))
            logging.info("Loading the datasets...")

            # fetch dataloaders
            test_dl = data_loader.fetch_test_dataloader(row['dataset'], 'data', params)

            logging.info("- done.")
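            # instantiate the bottleneck variant of each architecture and select the one used in this experiment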
            choices = {
                'linear': LinearRegression(bottleneck=True).cuda() if params.cuda else LinearRegression(bottleneck=True),
                'mlp': MLP(bottleneck=True).cuda() if params.cuda else MLP(bottleneck=True),
                'cnn': CNN(bottleneck=True).cuda() if params.cuda else CNN(bottleneck=True)
            }
            model = choices[row['model']]
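            # restore the best weights saved during training into the selected model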
            utils.load_checkpoint(os.path.join(exp_dir, 'best.pth.tar'), model)

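            # run the test set through the model to collect embeddings and their labels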
            test_embeddings, test_labels = extract_embeddings(test_dl, model)

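            # make sure the output directory for figures exists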
            if not os.path.isdir(figures_dir):
                os.mkdir(figures_dir)