Example #1
0
def load_all(config, args, mode):
    """Load the cached image patches, feature matrix and labels for a split.

    Reads the three precomputed ``.npy`` arrays from the features results
    directory for the given prefix and mode ('train' or 'test').
    Returns them as a (image_patches, feature_matrix, labels) tuple.
    """
    base = get_results_path(config.results_path, 'features',
                            args.prefix, mode)
    return tuple(np.load(base / (name + '.npy'))
                 for name in ('image_patches', 'feature_matrix', 'labels'))
Example #2
0

if __name__ == '__main__':
    # Entry point: load precomputed test features, restore the best trained
    # model (bag-of-words or Fisher-vector pipeline), and encode the test
    # feature matrix with the pipeline's transformer step.
    logger = get_logger('visual_histograms')
    set_excepthook(logger)  # route uncaught exceptions into the logger

    args = parse_arguments()
    config = load_config(args.config)
    set_seed(config.seed)  # reproducibility
    model_type = 'bow' if args.bow else 'fv'
    # NOTE(review): hard-coded cluster count; presumably must match the
    # cluster number the loaded model was trained with -- confirm.
    CLUSTERS_NUM = 10

    # train_image_patches, train_feature_matrix, train_labels = load_all(config, args, 'train')
    test_image_patches, test_feature_matrix, test_labels = load_all(
        config, args, 'test')
    # Model results directory is keyed by model type, prefix and cluster count.
    model_path = get_results_path(config.results_path, model_type, args.prefix,
                                  str(CLUSTERS_NUM))
    # 'best_model.pkl' holds a fitted search object; use its best pipeline.
    model = joblib.load(model_path / 'best_model.pkl').best_estimator_
    # Pipeline step name depends on which encoding the model uses.
    transformer_name = 'bag_of_words' if args.bow else 'fisher_vector'
    transformer = model.named_steps[transformer_name]

    # train_points = transformer.transform(train_feature_matrix)
    test_points = transformer.transform(test_feature_matrix)

    # compute distances from train and test to gmm clusters
    # train_distances = cdist(
    # train_feature_matrix.reshape(-1, 256), transformer.gmm_[0].transpose())
    # test_distances = cdist(
    #     test_feature_matrix.reshape(-1, 256), transformer.gmm_[0].transpose())

    # plot_similarity_mosaic(test_distances, test_image_patches, model_path)
Example #3
0
    stats = dict(zip(unique.tolist(), counts.tolist()))
    with open(path, mode='w') as f:
        yaml.dump(stats, f)


if __name__ == '__main__':
    # Entry point: set up input preprocessing for feature extraction over the
    # train or test split, with optional Gaussian-noise augmentation.
    logger = get_logger('extract_features')
    set_excepthook(logger)  # route uncaught exceptions into the logger

    args = parse_arguments()
    config = load_config(args.config)
    set_seed(config.seed)  # reproducibility
    mode = 'test' if args.test else 'train'
    if args.augment:
        # Augmented runs write into a separate '<prefix>_aug' results tree.
        args.prefix += '_aug'
    results_path = get_results_path(config.results_path, args.model,
                                    args.prefix, mode)
    logger.info(('Extracting features...\n'
                 'prefix: %s\n'
                 'mode: %s\n'
                 'augmentation: %s\n'
                 'model: %s'), args.prefix, mode, args.augment, args.model)

    # Base preprocessing: numpy patches -> tensors, then normalization.
    transform = [
        NumpyToTensor(),
        get_normalization_transform(),
    ]
    if args.augment:
        # Noise is injected first, i.e. on the raw numpy data before
        # tensor conversion and normalization.
        transform.insert(0, NumpyGaussianNoise(sigma=0.01))
    transform = Compose(transform)

    augmentation = None
Example #4
0
                        help='path to python module with shared experiment configuration')
    parser.add_argument('--augment', action='store_true',
                        help='enable augmentation')
    parser.add_argument('--features', default='alexnet',
                        help='which feature extraction method to use; can be one of alexnet, resnet18, inceptionv3')
    return parser.parse_args()


if __name__ == '__main__':
    # Entry point: load the precomputed training features (and, optionally,
    # their augmented counterparts) ahead of hyperparameter fitting.
    logger = get_logger('hyperparameters')
    set_excepthook(logger)  # route uncaught exceptions into the logger

    args = parse_arguments()
    config = load_config(args.config)
    set_seed(config.seed)  # reproducibility
    # Computed with the original prefix, before the '_aug' suffix is added.
    features_path = get_results_path(
        config.results_path, args.features, args.prefix, 'train')
    if args.augment:
        # Augmented features live under a separate '<prefix>_aug' directory;
        # note the plain features_path above still points at the un-augmented
        # data, so both sets get loaded below.
        args.prefix += '_aug'
        aug_features_path = get_results_path(
            config.results_path, args.features, args.prefix, 'train')
    # Output directory is keyed by model and (possibly augmented) prefix.
    train_results_path = get_results_path(
        config.results_path, args.features, str(args.model) + '_' + str(args.prefix), 'train')
    train_results_path.mkdir(parents=True, exist_ok=True)
    logger.info('Fitting hyperparameters for prefix %s with %s model',
                args.prefix, args.model)

    feature_matrix = np.load(features_path / 'feature_matrix.npy')
    labels = np.load(features_path / 'labels.npy')
    if args.augment:
        aug_feature_matrix = np.load(aug_features_path / 'feature_matrix.npy')
        aug_labels = np.load(aug_features_path / 'labels.npy')
Example #5
0
                        default=50,
                        type=int,
                        help='clusters number to train the model')
    return parser.parse_args()


if __name__ == '__main__':
    # Entry point: load training features and set up a grid search over the
    # bag-of-words or Fisher-vector pipeline with a fixed cluster count.
    logger = get_logger('train_model')
    set_excepthook(logger)  # route uncaught exceptions into the logger

    args = parse_arguments()
    config = load_config(args.config)
    set_seed(config.seed)  # reproducibility
    model = 'bow' if args.bow else 'fv'

    train_features_path = get_results_path(config.results_path, 'features',
                                           args.prefix, 'train')
    logger.info('Fitting model...')  # add config
    feature_matrix = np.load(train_features_path / 'feature_matrix.npy')
    labels = np.load(train_features_path / 'labels.npy')
    pipeline = bow if args.bow else fisher_vector_transformer
    # Pin the cluster count from the CLI into both grid entries.
    # NOTE(review): this mutates config.bow_param_grid / config.fv_param_grid
    # in place; later code reusing the config object sees the modified grids.
    # The range(2) assumes the grid has exactly two entries -- confirm.
    if args.bow:
        param_grid = config.bow_param_grid
        for i in range(2):
            param_grid[i]['bag_of_words__clusters_number'] = [args.clusters]
    else:
        param_grid = config.fv_param_grid
        for i in range(2):
            param_grid[i]['fisher_vector__gmm_clusters_number'] = [
                args.clusters
            ]
    # NOTE(review): n_jobs is hard-coded to 24 workers -- consider making it
    # configurable; confirm this matches the target machine.
    pipeline = model_selection.GridSearchCV(pipeline, param_grid, n_jobs=24)