Example No. 1
def handle_alphabet(folder):
    print('{}...'.format(folder.split('/')[-1]))
    for rotate in [0, 90, 180, 270]:
        # Create new folders for each augmented alphabet
        mkdir(f'{folder}.{rotate}')
        for root, character_folders, _ in os.walk(folder):
            for character_folder in character_folders:
                # For each character folder in an alphabet, rotate and resize all of the
                # images and save them to the new folder
                handle_characters(folder, root + '/' + character_folder, rotate)
                # return

    # Delete original alphabet
    rmdir(folder)
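The mkdir and rmdir helpers used throughout these examples come from few_shot.utils (imported explicitly in the later examples) and are not shown here. A minimal sketch of what such helpers typically do, assuming they are meant to tolerate missing or already-existing paths:

import os
import shutil


def mkdir(path):
    """Create a directory, doing nothing if it already exists."""
    try:
        os.makedirs(path)
    except FileExistsError:
        pass


def rmdir(path):
    """Recursively delete a directory tree, doing nothing if it is missing."""
    shutil.rmtree(path, ignore_errors=True)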
Example No. 2
def handle_alphabet(folder):
    print('{}...'.format(folder.split('/')[-1]))
    n_variations_character = 10
    transforms = torchvision.transforms.Compose([
        torchvision.transforms.ToPILImage(),
        torchvision.transforms.RandomAffine(20, scale=(0.9, 1.1), shear=20),
        # torchvision.transforms.ToTensor(),
    ])

    for rotate in [0]:
        # Create new folders for each augmented alphabet
        mkdir(str(folder) + '.' + str(rotate))
        for root, character_folders, _ in os.walk(folder):
            for character_folder in character_folders:
                # For each character folder in an alphabet, rotate and resize all of the
                # images and save them to the new folder
                handle_characters(folder,
                                  root + '/' + character_folder,
                                  n_variations=n_variations_character,
                                  rotate=rotate)
                # return

    # Delete original alphabet
    rmdir(folder)
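Example No. 2 builds a RandomAffine transform and passes n_variations_character down to handle_characters, but the per-image augmentation itself is not visible in the snippet. A minimal sketch of how such a transform could be applied per image; the helper name augment_image, the grayscale conversion and the output naming scheme are illustrative assumptions, not the original code:

from PIL import Image
import torchvision


def augment_image(src_path, dst_folder, n_variations=10):
    # Random affine parameters are re-sampled on every call to the transform
    transform = torchvision.transforms.RandomAffine(20, scale=(0.9, 1.1), shear=20)
    image = Image.open(src_path).convert('L')
    for i in range(n_variations):
        variation = transform(image)
        variation.save('{}/{}_{}'.format(dst_folder, i, src_path.split('/')[-1]))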
Example No. 3
    for rotate in [0, 90, 180, 270]:
        # Create new folders for each augmented alphabet
        mkdir(f'{folder}.{rotate}')
        for root, character_folders, _ in os.walk(folder):
            for character_folder in character_folders:
                # For each character folder in an alphabet, rotate and resize all of the
                # images and save them to the new folder
                handle_characters(folder, root + '/' + character_folder, rotate)
                # return

    # Delete original alphabet
    rmdir(folder)


# Clean up previous extraction
rmdir(prepared_omniglot_location)
mkdir(prepared_omniglot_location)

# Unzip dataset
for root, _, files in os.walk(raw_omniglot_location):
    for f in files:
        if f in dataset_zip_files:
            print('Unzipping {}...'.format(f))
            zip_ref = zipfile.ZipFile(root + f, 'r')
            zip_ref.extractall(prepared_omniglot_location)
            zip_ref.close()

print('Processing background set...')
for root, alphabets, _ in os.walk(prepared_omniglot_location + 'images_background/'):
    for alphabet in sorted(alphabets):
        handle_alphabet(root + alphabet)
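The snippet only walks images_background; the evaluation alphabets would presumably be handled the same way. A minimal sketch of the analogous loop, assuming the standard images_evaluation folder produced by the Omniglot zips:

print('Processing evaluation set...')
for root, alphabets, _ in os.walk(prepared_omniglot_location + 'images_evaluation/'):
    for alphabet in sorted(alphabets):
        handle_alphabet(root + alphabet)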
Example No. 4
# Class names held out for the evaluation split
evaluation_classes = [
    'Jeans', 'Bracelet', 'Eyeshadow', 'Sweaters', 'Sarees', 'Earrings',
    'Casual Shoes', 'Tracksuits', 'Clutches', 'Socks', 'Innerwear Vests',
    'Night suits', 'Salwar', 'Stoles', 'Face Moisturisers',
    'Perfume and Body Mist', 'Lounge Shorts', 'Scarves', 'Briefs', 'Jumpsuit',
    'Wallets', 'Foundation and Primer', 'Sports Shoes', 'Highlighter and Blush',
    'Sunscreen', 'Shoe Accessories', 'Track Pants', 'Fragrance Gift Set',
    'Shirts', 'Sweatshirts', 'Mask and Peel', 'Jewellery Set',
    'Face Wash and Cleanser', 'Messenger Bag', 'Free Gifts', 'Kurtas', 'Mascara',
    'Lounge Pants', 'Caps', 'Lip Care', 'Trunk', 'Tunics', 'Kurta Sets',
    'Sunglasses', 'Lipstick', 'Churidar', 'Travel Accessory'
]

# Clean up folders
rmdir(DATA_PATH + '/fashion_small/images_background')
rmdir(DATA_PATH + '/fashion_small/images_evaluation')

# Create class folders
for c in background_classes:
    mkdir(DATA_PATH + f'/fashion_small/images_background/{c}/')

for c in evaluation_classes:
    mkdir(DATA_PATH + f'/fashion_small/images_evaluation/{c}/')

root = path_to_fashion + '/images'
with open(path_to_fashion + '/styles.csv') as f:
    for line in tqdm(f.readlines()[1:]):
        line = line.split(',')
        class_name = line[4]
        if class_name in evaluation_classes:
Example No. 5
1. Download files from https://drive.google.com/file/d/0B3Irx3uQNoBMQ1FlNXJsZUdYWEE/view and place in
    data/miniImageNet/images
2. Run the script
"""
from tqdm import tqdm as tqdm
import numpy as np
import shutil
import os

from config import DATA_PATH
from few_shot.utils import mkdir, rmdir

# Clean up folders
rmdir(DATA_PATH + '/miniImageNet/images_background')
rmdir(DATA_PATH + '/miniImageNet/images_evaluation')
mkdir(DATA_PATH + '/miniImageNet/images_background')
mkdir(DATA_PATH + '/miniImageNet/images_evaluation')

# Find class identities
classes = []
for root, _, files in os.walk(DATA_PATH + '/miniImageNet/images/'):
    for f in files:
        if f.endswith('.jpg'):
            classes.append(f[:-12])

classes = list(set(classes))

# Train/test split
np.random.seed(0)
Example No. 6
This script uses the 100 classes of 600 images each used in the Matching Networks paper. The exact images used are
given in data/mini_imagenet.txt which is downloaded from the link provided in the paper (https://goo.gl/e3orz6).
1. Download files from https://drive.google.com/file/d/0B3Irx3uQNoBMQ1FlNXJsZUdYWEE/view and place in
    data/miniImageNet/images
2. Run the script
"""
from tqdm import tqdm as tqdm
import numpy as np
import shutil
import os

from config import DATA_PATH
from few_shot.utils import mkdir, rmdir

# Clean up folders
rmdir(DATA_PATH + '/few-shot/images_background')
rmdir(DATA_PATH + '/few-shot/images_evaluation')
mkdir(DATA_PATH + '/few-shot/images_background')
mkdir(DATA_PATH + '/few-shot/images_evaluation')

# Find class identities
classes = []
for root, _, files in os.walk(DATA_PATH + '/few-shot/images/'):
    for f in files:
        if f.endswith('.jpg'):
            classes.append(f[:-12])

classes = list(set(classes))

# Train/test split
np.random.seed(0)
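Both miniImageNet-style snippets stop right after seeding NumPy's RNG; the actual train/test split and file copy are not shown. A minimal sketch of how the shuffled class list might then be divided and the images routed into the two folders. The 80/20 split, the use of shutil.copy and the per-class destination folders are assumptions consistent with the surrounding code, not the original script:

np.random.shuffle(classes)
background_classes, evaluation_classes = classes[:80], classes[80:]

for root, _, files in os.walk(DATA_PATH + '/few-shot/images/'):
    for f in tqdm(files):
        if not f.endswith('.jpg'):
            continue
        class_name = f[:-12]  # strip the per-image suffix to recover the class identity
        subset = 'images_background' if class_name in background_classes else 'images_evaluation'
        dst = DATA_PATH + '/few-shot/' + subset + '/' + class_name
        mkdir(dst)  # create the class folder on first encounter
        shutil.copy(root + '/' + f, dst + '/' + f)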
Example No. 7
from distutils.dir_util import copy_tree
from tqdm import tqdm as tqdm
from os import walk
import numpy as np
import shutil

from config import DATA_PATH
from few_shot.utils import mkdir, rmdir

# Clean up folders
rmdir(DATA_PATH + '/kaggle/images_background')
rmdir(DATA_PATH + '/kaggle/images_evaluation')
rmdir(DATA_PATH + '/kaggle/images_test')
mkdir(DATA_PATH + '/kaggle/images_background')
mkdir(DATA_PATH + '/kaggle/images_evaluation')
mkdir(DATA_PATH + '/kaggle/images_test')

classes = []
for _, folders, _ in walk(DATA_PATH + '/kaggle/images'):
    for f in folders:
        classes.append(f)

np.random.seed(0)
np.random.shuffle(classes)
background_classes, evaluation_classes, test_classes = (
    classes[:80], classes[80:100], classes[100:])

print('Preparing background_data....')
for i in tqdm(range(len(background_classes))):
    folder = background_classes[i]
    src = DATA_PATH + '/kaggle/images/' + folder
Example No. 8
from distutils.dir_util import copy_tree
from tqdm import tqdm as tqdm
from os import walk
import numpy as np
import shutil

from config import DATA_PATH
from few_shot.utils import mkdir, rmdir

# Clean up folders
rmdir(DATA_PATH + '/whoas/images_background')
rmdir(DATA_PATH + '/whoas/images_evaluation')
mkdir(DATA_PATH + '/whoas/images_background')
mkdir(DATA_PATH + '/whoas/images_evaluation')

classes = []
for _, folders, _ in walk(DATA_PATH + '/whoas/images'):
    for f in folders:
        classes.append(f)

np.random.seed(0)
np.random.shuffle(classes)
background_classes, evaluation_classes = classes[:83], classes[83:]

print('Preparing background_data....')
for i in tqdm(range(len(background_classes))):
    folder = background_classes[i]
    src = DATA_PATH + '/whoas/images/' + folder
    dst = DATA_PATH + '/whoas/images_background/' + folder
    copy_tree(src, dst)
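The copy loop above only prepares the background split; the evaluation classes would presumably be copied the same way. A minimal sketch of the analogous loop:

print('Preparing evaluation_data....')
for i in tqdm(range(len(evaluation_classes))):
    folder = evaluation_classes[i]
    src = DATA_PATH + '/whoas/images/' + folder
    dst = DATA_PATH + '/whoas/images_evaluation/' + folder
    copy_tree(src, dst)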
Example No. 9
from PIL import Image

from config import DATA_PATH
from few_shot.utils import mkdir, rmdir

import argparse
import sys
sys.path.append('./')

parser = argparse.ArgumentParser()
parser.add_argument('--size',
                    default='small',
                    help='Dataset size: small or large (default: small)')
args = parser.parse_args()

# Clean up folders
rmdir(DATA_PATH + '/fashion-dataset/images_background')
rmdir(DATA_PATH + '/fashion-dataset/images_evaluation')
mkdir(DATA_PATH + '/fashion-dataset/images_background')
mkdir(DATA_PATH + '/fashion-dataset/images_evaluation')

# Find class identities
classes = []
meta_file = open(DATA_PATH + '/fashion-dataset/styles.csv', 'r')
meta_data = meta_file.readlines()[1:]
meta_train_file = open(DATA_PATH + '/fashion-dataset/metaTrain.txt', 'r')
meta_test_file = open(DATA_PATH + '/fashion-dataset/metaTest.txt', 'r')
meta_train_data = set([line.rstrip() for line in meta_train_file])
meta_test_data = set([line.rstrip() for line in meta_test_file])

# Train/test split
background_classes, evaluation_classes = meta_train_data, meta_test_data
Example No. 10
2. Run the script

"""
import sys
sys.path.append('../')
import os
import shutil
import natsort
import numpy as np
from tqdm import tqdm as tqdm
import pandas as pd
from config import DATA_PATH
from few_shot.utils import mkdir, rmdir

# Delete and recreate working folders/files
rmdir(DATA_PATH + '/fashionNet/images_background')
rmdir(DATA_PATH + '/fashionNet/images_evaluation')
rmdir(DATA_PATH + '/fashionNet/refac_Images')
mkdir(DATA_PATH + '/fashionNet/images_background')
mkdir(DATA_PATH + '/fashionNet/images_evaluation')
mkdir(DATA_PATH + '/fashionNet/refac_Images')

print("Is the DATA_PATH is Correct?",
      os.path.exists(DATA_PATH + '/fashionNet/images/'))
'''
Directory/file name changes:
1. Use styles.csv to map image_id to subCategory, class_labels and meta_sets
2. Rename the images using os.rename() for the support/query split
'''
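The renaming step described in the comment above is not shown in this snippet. A minimal sketch of what it might look like, assuming styles.csv has 'id' and 'subCategory' columns and that each image file is named <id>.jpg; the subCategory-prefixed naming scheme under refac_Images is an illustrative assumption:

styles = pd.read_csv(DATA_PATH + '/fashionNet/styles.csv')
src_dir = DATA_PATH + '/fashionNet/images/'
dst_dir = DATA_PATH + '/fashionNet/refac_Images/'

for image_id, sub_category in zip(styles['id'], styles['subCategory']):
    src = src_dir + str(image_id) + '.jpg'
    if os.path.exists(src):
        # Prefix each file with its subCategory so later splits can group images by class
        os.rename(src, dst_dir + '{}_{}.jpg'.format(sub_category, image_id))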

_classes = []