import numpy as np
import pandas as pd

# Project-local helpers (config, get_test_data_loader, load_trained_model,
# format_prediction_string, save_labeled_image, make_ensemble_predictions,
# run_wbf) are assumed to be in scope.


def run_predict_one_fold(fold: int):
    test_data_loader = get_test_data_loader()

    # Load the checkpoint trained on this fold.
    model = load_trained_model(f"checkpoint-f{fold}.pt")
    model.eval()

    results = []

    for images, image_ids in test_data_loader:
        images = [image.to(config.DEVICE) for image in images]
        outputs = model(images)
        for i, image in enumerate(images):
            boxes = outputs[i]['boxes'].cpu().detach().numpy()
            scores = outputs[i]['scores'].cpu().detach().numpy()
            # Keep only detections above the confidence threshold.
            keep = scores > config.SCORE_THRESHOLD
            boxes = boxes[keep]
            scores = scores[keep]
            image_id = image_ids[i]

            result = {
                'image_id': image_id,
                'PredictionString': format_prediction_string(boxes, scores)
            }

            results.append(result)

            save_labeled_image(image, boxes, image_id)

    test_df = pd.DataFrame(results, columns=['image_id', 'PredictionString'])
    test_df.to_csv('submission.csv', index=False)
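
The `format_prediction_string` helper is not part of the snippet. A minimal
sketch, assuming the Kaggle-style "score x y w h" submission layout (the exact
coordinate convention is an assumption, not visible here):

def format_prediction_string(boxes, scores):
    # One "score x y w h" group per detection, space-separated.
    return " ".join(
        f"{score:.4f} {box[0]} {box[1]} {box[2]} {box[3]}"
        for score, box in zip(scores, boxes)
    )
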

def run_predict():
    test_data_loader = get_test_data_loader()

    # Load one trained checkpoint per fold to form the ensemble.
    models = []
    for fold in range(config.NUM_FOLDS):
        models.append(load_trained_model(f"checkpoint-f{fold}.pt"))

    results = []

    for images, image_ids in test_data_loader:
        predictions = make_ensemble_predictions(images, models)
        for i, image in enumerate(images):
            # Fuse the per-model detections with weighted boxes fusion (WBF).
            boxes, scores, labels = run_wbf(predictions, image_index=i)
            boxes = boxes.astype(np.int32)
            image_id = image_ids[i]

            result = {
                'image_id': image_id,
                'PredictionString': format_prediction_string(boxes, scores)
            }
            results.append(result)

            save_labeled_image(image, boxes, image_id)

    test_df = pd.DataFrame(results, columns=['image_id', 'PredictionString'])
    test_df.to_csv('submission.csv', index=False)
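
`make_ensemble_predictions` and `run_wbf` are likewise external to the snippet.
A plausible sketch built on the `ensemble_boxes` package; the 1024 px image
size, the thresholds, and the single foreground class are assumptions:

import numpy as np
import torch
from ensemble_boxes import weighted_boxes_fusion

def make_ensemble_predictions(images, models):
    # Run every fold model on the batch (models assumed to be in eval mode).
    images = [image.to(config.DEVICE) for image in images]
    with torch.no_grad():
        return [model(images) for model in models]

def run_wbf(predictions, image_index, image_size=1024,
            iou_thr=0.55, skip_box_thr=0.43):
    # weighted_boxes_fusion expects [x1, y1, x2, y2] boxes normalized to [0, 1].
    boxes = [p[image_index]['boxes'].cpu().numpy() / (image_size - 1)
             for p in predictions]
    scores = [p[image_index]['scores'].cpu().numpy() for p in predictions]
    labels = [np.ones(len(s)) for s in scores]  # single-class assumption
    boxes, scores, labels = weighted_boxes_fusion(
        boxes, scores, labels, weights=None,
        iou_thr=iou_thr, skip_box_thr=skip_box_thr)
    boxes = boxes * (image_size - 1)  # back to pixel coordinates
    return boxes, scores, labels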
Example #3
        print("Error: You need to specify an existing preprocessing json file.")
        print(preproc_file,": this file doesn't exist!")
        print(usage)
        sys.exit(1)
    if save_feature == -1:
        print("Error: You need to specify an existing features json file.")
        print(feature_file,": this file doesn't exist!")
        print(usage)
        sys.exit(1)

    print("Using %d core(s)"%cores)
    print("Using batches of %d files"%batch_quantity)
    print("Total number of files to process: %d"%num_files)
    
    print("Loading trained module...")
    model, model_path = mo.load_trained_model(args, config_model, metadata )
    if model is None:
        print("Some error ocurred during the loading of the trained module.")
        print("Please check if the module file exists and if it's valid.")
        print("Failed to load model: ",model_path)
        print(usage)
        sys.exit(1) 
        
    print("Initiating the extraction of features and classification process...")
    files = []
    for d in doc_ids:
        files.append(join(pdf_path,d+'.pdf'))
    over_batch = batch_quantity
    under_batch = 0
    num_batch = 0
                    
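
The example cuts off after the batch counters are initialized. A minimal
sketch of how such a loop typically consumes them (the loop body and the
`process_batch` helper are hypothetical, not from the original):

    while under_batch < num_files:
        batch = files[under_batch:over_batch]  # at most batch_quantity paths
        process_batch(batch, model, cores)     # hypothetical per-batch worker
        under_batch = over_batch
        over_batch += batch_quantity
        num_batch += 1
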
Example #4
import os
import sys

import numpy as np


def softmax_to_age(predictions):
    # Expected age: probability-weighted sum over the module-level
    # `classes` vector of age-bin values (defined elsewhere).
    return np.sum(predictions * classes, axis=1)
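
`classes` is the module-level vector of age-bin values the softmax is decoded
against. A quick sanity check, assuming bins 0..100:

classes = np.arange(0, 101)         # assumed age bins
preds = np.zeros((1, 101))
preds[0, 30] = preds[0, 40] = 0.5   # mass split between ages 30 and 40
print(softmax_to_age(preds))        # -> [35.]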


if __name__ == "__main__":
    # Run relative to this file so the relative data paths below resolve.
    BASE_PATH = os.path.dirname(os.path.abspath(__file__))
    os.chdir(BASE_PATH)

    # Make sibling modules importable when run as a script.
    sys.path.insert(0, os.path.dirname(__file__))
    sys.path.insert(0, os.getcwd())

    DATA_NAME = 'wiki'  # or 'imdb'

    # load model
    vgg_face_age_model = load_trained_model()

    # load data
    data_df = None
    base_path = '.'
    if DATA_NAME == 'wiki':
        data_df = load_data.load_wiki_df_from_csv(
            '../data/processed/wiki_df.csv')
        base_path = '../data/raw/wiki_crop/'
        print("= " + str(len(data_df)) + " ROWS OF WIKI DATA LOADED\n")
    elif DATA_NAME == 'imdb':
        data_df = load_data.load_imdb_df_from_pkl(
            '../data/processed/imdb_meta_df.pkl')
        # Each full_path entry is a single-element array; unwrap it.
        data_df['full_path'] = data_df['full_path'].map(lambda x: x[0])
        base_path = '../data/raw/imdb_crop/'
        print("= " + str(len(data_df)) + " ROWS OF IMDB DATA LOADED\n")