Example #1
import pytest

import keras_preprocessing


def test_get_keras_submodule_errors(monkeypatch):
    # Only 'backend' and 'utils' are valid submodule names.
    with pytest.raises(ImportError):
        keras_preprocessing.get_keras_submodule('something')

    # Without a registered Keras backend, even valid names cannot be served.
    monkeypatch.setattr(keras_preprocessing, '_KERAS_BACKEND', None)
    with pytest.raises(ImportError):
        keras_preprocessing.get_keras_submodule('backend')

    with pytest.raises(ImportError):
        keras_preprocessing.get_keras_submodule('utils')
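The behaviour this test pins down can be summarised as: names other than 'backend' and 'utils' are rejected outright, and even valid names fail until Keras has registered its submodules. The following sketch restates that contract for illustration; it is written from the test's expectations, not copied from the library source.

_KERAS_BACKEND = None
_KERAS_UTILS = None


def get_keras_submodule(name):
    # Illustrative restatement of the contract exercised by the test above.
    if name not in {'backend', 'utils'}:
        raise ImportError('Can only retrieve "backend" and "utils". '
                          'Requested: %s' % name)
    if _KERAS_BACKEND is None:
        raise ImportError('You need to first `import keras` '
                          'in order to use `keras_preprocessing`.')
    return _KERAS_BACKEND if name == 'backend' else _KERAS_UTILS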
Example #2
"""Utilities for real-time data augmentation on image data.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import threading
import numpy as np
from keras_preprocessing import get_keras_submodule

try:
    IteratorType = get_keras_submodule('utils').Sequence
except ImportError:
    IteratorType = object

from .utils import (array_to_img, img_to_array, load_img)
from .affine_transformations import random_crop, center_crop, rotate_random_zoom_crop


class Iterator(IteratorType):
    """Base class for image data iterators.

    Every `Iterator` must implement the `_get_batches_of_transformed_samples`
    method.

    # Arguments
        n: Integer, total number of samples in the dataset to loop over.
        batch_size: Integer, size of a batch.
        shuffle: Boolean, whether to shuffle the data between epochs.
        seed: Random seeding for data shuffling.
    """
Example #3
import sys
import multiprocessing.pool
import numpy as np
import keras
import keras.preprocessing.image
from keras_preprocessing.image import Iterator, load_img, img_to_array
from keras_preprocessing import get_keras_submodule

backend = get_keras_submodule('backend')
keras_utils = get_keras_submodule('utils')

try:
    from PIL import ImageEnhance
    from PIL import Image as pil_image
except ImportError:
    pil_image = None
    ImageEnhance = None

try:
    import scipy
except ImportError:
    scipy = None

if pil_image is not None:
    _PIL_INTERPOLATION_METHODS = {
        'nearest': pil_image.NEAREST,
        'bilinear': pil_image.BILINEAR,
        'bicubic': pil_image.BICUBIC,
    }
    # These methods were only introduced in version 3.4.0 (2016).
    if hasattr(pil_image, 'HAMMING'):
        _PIL_INTERPOLATION_METHODS['hamming'] = pil_image.HAMMING
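The _PIL_INTERPOLATION_METHODS mapping built above is typically used to translate a string such as 'bilinear' into a PIL resampling flag before resizing. A hedged sketch of that lookup follows; the helper name is chosen here for illustration and is not part of the snippet.

def resize_with_interpolation(img, width, height, interpolation='nearest'):
    # Map the interpolation name to a PIL resampling flag, failing loudly
    # when the name is unknown or unsupported by the installed Pillow.
    if interpolation not in _PIL_INTERPOLATION_METHODS:
        raise ValueError(
            'Invalid interpolation method {} specified. Supported methods '
            'are {}'.format(interpolation,
                            ', '.join(_PIL_INTERPOLATION_METHODS)))
    resample = _PIL_INTERPOLATION_METHODS[interpolation]
    return img.resize((width, height), resample)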
Example #4
import os
import numpy as np
import PIL

from keras_preprocessing.image import (ImageDataGenerator, DirectoryIterator,
                                       array_to_img, load_img, img_to_array)
from keras_preprocessing import get_keras_submodule

backend = get_keras_submodule('backend')


class AugmentedDirectoryIterator(DirectoryIterator):
    '''

    AugmentedDirectoryIterator inherits from DirectoryIterator:
    (https://github.com/keras-team/keras-preprocessing/blob/master/keras_preprocessing/image.py#L1811)

    This implementation adds the ability to compute multiple crops, following the multi-crop scheme of Going Deeper
    with Convolutions (https://arxiv.org/pdf/1409.4842.pdf), and to apply transforms to those crops.

    It adds data_augmentation as an argument: a dictionary consisting of 3 elements:

    - 'scale_sizes': 'default' (four scales similar to the original paper) or a list of sizes. Each scaled image
    is then cropped into three square parts. For each square, we take the 4 corner crops and the center "target_size"
    crop, as well as the square resized to "target_size".
    - 'transforms': list of transforms to apply to these crops, in addition to the untransformed crop
    ('horizontal_flip', 'vertical_flip', 'rotate_90', 'rotate_180', 'rotate_270' are currently supported).
    - 'crop_original': 'center_crop' mode center-crops the original image prior to the rest of the transforms,
    scalings and croppings.

    If 'scale_sizes' is None, the image is resized to "target_size" and the transforms are applied to that image.
    '''
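Since the constructor itself is cut off above, the following usage sketch assumes AugmentedDirectoryIterator keeps DirectoryIterator's positional arguments (a directory path, then an ImageDataGenerator) and accepts data_augmentation as an extra keyword argument; the directory path and sizes are placeholders.

data_augmentation = {
    'scale_sizes': 'default',                        # or e.g. a list of sizes such as [256, 288, 320, 352]
    'transforms': ['horizontal_flip', 'rotate_90'],  # applied on top of the plain crops
    'crop_original': 'center_crop',
}

generator = ImageDataGenerator(rescale=1. / 255)
iterator = AugmentedDirectoryIterator(
    'data/validation',                # hypothetical directory of class subfolders
    generator,
    target_size=(224, 224),
    batch_size=16,
    data_augmentation=data_augmentation)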
Example #5
import keras_preprocessing


def test_get_keras_submodule(monkeypatch):
    # Patch the module-level globals that normally hold Keras' submodules and
    # check that get_keras_submodule returns whatever has been registered.
    monkeypatch.setattr(keras_preprocessing, '_KERAS_BACKEND', 'backend')
    assert 'backend' == keras_preprocessing.get_keras_submodule('backend')
    monkeypatch.setattr(keras_preprocessing, '_KERAS_UTILS', 'utils')
    assert 'utils' == keras_preprocessing.get_keras_submodule('utils')
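The monkeypatching above stands in for the normal wiring: Keras itself is expected to register its submodules once at import time. A minimal sketch of that registration, assuming the set_keras_submodules(backend, utils) hook that older Keras releases call:

import keras_preprocessing
from keras import backend, utils

# Register Keras' backend and utils modules so that later
# get_keras_submodule calls can hand them back out.
keras_preprocessing.set_keras_submodules(backend=backend, utils=utils)

assert keras_preprocessing.get_keras_submodule('backend') is backend
assert keras_preprocessing.get_keras_submodule('utils') is utils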
"""Utilities for real-time data augmentation on image data.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import threading
import numpy as np
from keras_preprocessing import get_keras_submodule

try:
    IteratorType = get_keras_submodule('utils').Sequence
except ImportError:
    IteratorType = object

from .utils import (array_to_img,
                    img_to_array,
                    load_img)


class Iterator(IteratorType):
    """Base class for image data iterators.

    Every `Iterator` must implement the `_get_batches_of_transformed_samples`
    method.

    # Arguments
        n: Integer, total number of samples in the dataset to loop over.
        batch_size: Integer, size of a batch.
        shuffle: Boolean, whether to shuffle the data between epochs.
        seed: Random seeding for data shuffling.