# Esempio n. 1 (Example 1, score: 0)
def main():
    """CLI entry point: parse the command-line flags, set up the
    environment, then dispatch to label preprocessing, dataset
    augmentation, training or testing."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--tolabel", action="store_true", default=False,
                        help="Preprocess images to create labels (out/tolabel)")
    parser.add_argument("--augmentation", type=int,
                        help="Dataset augmentation (pass quantity)")
    parser.add_argument("--dataset", type=str, default=constant.DATASET,
                        help="Dataset name")
    parser.add_argument("--train", action="store_true", default=False,
                        help="Train")
    parser.add_argument("--test", action="store_true", default=False,
                        help="Predict")
    parser.add_argument("--arch", type=str, default=constant.MODEL,
                        help="Neural Network architecture")
    parser.add_argument("--dip", type=str, default=constant.IMG_PROCESSING,
                        help="Method for image processing")
    parser.add_argument("--gpu", action="store_true", default=False,
                        help="Enable GPU mode")
    args = parser.parse_args()

    environment.setup(args)

    def dataset_exists(name):
        # Usable only when the name is non-empty and its data folder is
        # present on disk (mkdir=False: just probe, do not create).
        return len(name) > 0 and path.exist(path.data(name, mkdir=False))

    if args.tolabel:
        generator.tolabel()
    elif args.dataset is not None and dataset_exists(args.dataset):
        if args.augmentation:
            generator.augmentation(args.augmentation)
        elif args.train:
            nn.train()
        elif args.test:
            nn.test()
    else:
        print("\n>> Dataset not found\n")
def main():
    """Hard-wired entry point: run training on the "crackconcrete"
    dataset when its data folder is available on disk."""
    dataset_name = "crackconcrete"
    do_train, do_test = True, False

    environment.setup()

    def dataset_exists(name):
        # Non-empty name whose data directory is present on disk.
        return len(name) > 0 and path.exist(path.data(name, mkdir=False))

    if dataset_name is None or not dataset_exists(dataset_name):
        print("\n>> Dataset not found\n")
    elif do_train:
        nn.train()
    elif do_test:
        nn.test()
# Esempio n. 3 (Example 3, score: 0)
def augmentation(n=1):
    """Write `n` augmented image/label batch pairs for the training set.

    Images and their label masks are streamed from the dataset's train
    folder with the same generator configuration and the same `seed`, so
    the random geometric transform applied to an image matches the one
    applied to its label. Drawing a batch writes the augmented files as a
    side effect of `save_to_dir`.

    Args:
        n: number of augmented batch pairs to produce (default 1).
           Values < 1 now produce nothing.
    """
    # Bug fix: the previous loop checked `i >= n-1` only *after* drawing
    # the first batch, so n <= 0 still wrote one augmented pair to disk.
    if n < 1:
        return

    batch_size = 1
    target_size = const.IMAGE_SIZE[:2]
    # One shared seed keeps the image and label random streams in lockstep.
    seed = int(np.random.rand(1) * 100)

    train_path = path.data(const.DATASET, const.dn_TRAIN)

    image_folder = image_save_prefix = const.dn_IMAGE
    label_folder = label_save_prefix = const.dn_LABEL

    image_to_dir = path.dn_aug(const.dn_IMAGE)
    label_to_dir = path.dn_aug(const.dn_LABEL)

    # A single generator object serves both flows: the configuration is
    # identical and the shared seed makes their transforms match.
    gen = ImageDataGenerator(
        rotation_range=0.2,
        fill_mode="constant",
        rescale=1. / 255,
        width_shift_range=0.05,
        height_shift_range=0.05,
        channel_shift_range=0.05,
        shear_range=0.05,
        zoom_range=0.05,
        vertical_flip=True,
        horizontal_flip=True)

    image_batch = gen.flow_from_directory(
        directory=train_path,
        classes=[image_folder],
        target_size=target_size,
        batch_size=batch_size,
        save_to_dir=image_to_dir,
        save_prefix=image_save_prefix,
        seed=seed)

    label_batch = gen.flow_from_directory(
        directory=train_path,
        classes=[label_folder],
        target_size=target_size,
        batch_size=batch_size,
        save_to_dir=label_to_dir,
        save_prefix=label_save_prefix,
        seed=seed)

    # The flows are infinite; stop once n pairs have been drawn/saved.
    for produced, _ in enumerate(zip(image_batch, label_batch), start=1):
        if produced >= n:
            break
# Esempio n. 4 (Example 4, score: 0)
def fetchFromPath(origin, suborigin=""):
    """Load a dataset split from data/<origin>/<suborigin>.

    Lists the folder contents via `listFolder` and converts the first
    entry (presumably the raw image array — confirm against `listFolder`)
    with `fetchFromArray` before returning the whole structure.
    """
    target_dir = os.path.join(path.data(), origin, suborigin)
    result = listFolder(target_dir, origin)
    result[0] = fetchFromArray(result[0])
    return result