Code example #1
File: ilsvrc2010.py  Project: Afrik/fuel
def process_other_set(hdf5_file, which_set, image_archive, patch_archive,
                      groundtruth, offset):
    """Process the validation or test set.

    Parameters
    ----------
    hdf5_file : :class:`h5py.File` instance
        HDF5 file handle to which to write. Assumes `features`, `targets`
        and `filenames` already exist and have first dimension larger than
        `sum(images_per_class)`.
    which_set : str
        Which set of images is being processed. One of 'train', 'valid',
        'test'.  Used for extracting the appropriate images from the patch
        archive.
    image_archive : str or file-like object
        The filename or file-handle for the TAR archive containing images.
    patch_archive : str or file-like object
        Filename or file handle for the TAR archive of patch images.
    groundtruth : iterable
        Iterable container containing scalar 0-based class index for each
        image, sorted by filename.
    offset : int
        The offset in the HDF5 datasets at which to start writing.

    """
    producer = partial(other_set_producer, image_archive=image_archive,
                       patch_archive=patch_archive,
                       groundtruth=groundtruth, which_set=which_set)
    consumer = partial(image_consumer, hdf5_file=hdf5_file,
                       num_expected=len(groundtruth), offset=offset)
    producer_consumer(producer, consumer)
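
For context, a hedged sketch of how this function might be invoked for the validation split, assuming the HDF5 file and its `features`/`targets`/`filenames` datasets already exist and the training images occupy the first `n_train` rows. The archive names, the toy `valid_groundtruth` list and the row count are placeholders for illustration, not values taken from the example above.

import h5py

from ilsvrc2010 import process_other_set  # the module shown above

# Placeholder ground truth: one 0-based class index per validation JPEG,
# sorted by filename (the real list has one entry per validation image).
valid_groundtruth = [243, 17, 902]
n_train = 1261406  # rows already written for the training set (placeholder count)

with h5py.File('ilsvrc2010.hdf5', 'a') as f:
    process_other_set(f, 'valid',
                      image_archive='ILSVRC2010_images_val.tar',  # placeholder name
                      patch_archive='patch_images.tar',           # placeholder name
                      groundtruth=valid_groundtruth,
                      offset=n_train)  # validation rows start right after training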
Code example #2
File: ilsvrc2010.py  Project: Afrik/fuel
def process_train_set(hdf5_file, train_archive, patch_archive, n_train,
                      wnid_map, shuffle_seed=None):
    """Process the ILSVRC2010 training set.

    Parameters
    ----------
    hdf5_file : :class:`h5py.File` instance
        HDF5 file handle to which to write. Assumes `features`, `targets`
        and `filenames` already exist and have first dimension larger than
        `n_train`.
    train_archive : str or file-like object
        Filename or file handle for the TAR archive of training images.
    patch_archive : str or file-like object
        Filename or file handle for the TAR archive of patch images.
    n_train : int
        The number of items in the training set.
    wnid_map : dict
        A dictionary mapping WordNet IDs to class indices.
    shuffle_seed : int or sequence, optional
        Seed for a NumPy random number generator that permutes the
        training set on disk. If `None`, no permutation is performed
        (this is the default).

    """
    producer = partial(train_set_producer, train_archive=train_archive,
                       patch_archive=patch_archive, wnid_map=wnid_map)
    consumer = partial(image_consumer, hdf5_file=hdf5_file,
                       num_expected=n_train, shuffle_seed=shuffle_seed)
    producer_consumer(producer, consumer)
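
Since the docstring requires `features`, `targets` and `filenames` to exist beforehand, a hypothetical setup sketch is given below. The dtypes, row counts, archive names and the two-entry `wnid_map` are illustrative assumptions, not the exact layout used by the real converter.

import h5py
import numpy

from ilsvrc2010 import process_train_set  # the module shown above

n_train, n_other = 1261406, 200000  # placeholder counts for train and valid+test
wnid_map = {'n01440764': 0, 'n01443537': 1}  # toy mapping; the real one covers every WNID

with h5py.File('ilsvrc2010.hdf5', 'w') as f:
    # Placeholder layout: encoded JPEGs as variable-length byte strings,
    # integer class labels and fixed-width filename strings.
    vlen_uint8 = h5py.special_dtype(vlen=numpy.dtype('uint8'))
    f.create_dataset('features', (n_train + n_other,), dtype=vlen_uint8)
    f.create_dataset('targets', (n_train + n_other,), dtype='int16')
    f.create_dataset('filenames', (n_train + n_other,), dtype='S32')
    process_train_set(f, 'ILSVRC2010_images_train.tar', 'patch_images.tar',
                      n_train, wnid_map, shuffle_seed=1234)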
Code example #3
File: ilsvrc2010.py  Project: cjbear/BackPocketSocial
def process_other_set(hdf5_file, which_set, image_archive, patch_archive,
                      groundtruth, offset):
    """Process the validation or test set.

    Parameters
    ----------
    hdf5_file : :class:`h5py.File` instance
        HDF5 file handle to which to write. Assumes `features`, `targets`
        and `filenames` already exist and have first dimension larger than
        `sum(images_per_class)`.
    which_set : str
        Which set of images is being processed. One of 'train', 'valid',
        'test'.  Used for extracting the appropriate images from the patch
        archive.
    image_archive : str or file-like object
        The filename or file-handle for the TAR archive containing images.
    patch_archive : str or file-like object
        Filename or file handle for the TAR archive of patch images.
    groundtruth : iterable
        Iterable container containing scalar 0-based class index for each
        image, sorted by filename.
    offset : int
        The offset in the HDF5 datasets at which to start writing.

    """
    producer = partial(other_set_producer,
                       image_archive=image_archive,
                       patch_archive=patch_archive,
                       groundtruth=groundtruth,
                       which_set=which_set)
    consumer = partial(image_consumer,
                       hdf5_file=hdf5_file,
                       num_expected=len(groundtruth),
                       offset=offset)
    producer_consumer(producer, consumer)
Code example #4
File: test_utils.py  Project: ixtel/attention-lvcsr
def test_producer_consumer():
    assert (producer_consumer(partial(send_integers, n=2000),
                              receive_integers)
            == sum(i ** 2 for i in range(2000)))
Code example #5
File: test_utils.py  Project: critias/fuel
def test_producer_consumer():
    assert (producer_consumer(partial(send_integers, n=2000),
                              receive_integers) == sum(i**2
                                                       for i in range(2000)))
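
The assertion implies that `send_integers` emits the integers 0..n-1 and that `receive_integers` returns the sum of their squares. Below is a self-contained sketch of that contract using a plain multiprocessing queue and a simplified stand-in for `producer_consumer`; it illustrates the producer/consumer split exercised by the test rather than fuel's actual socket-based implementation.

from functools import partial
from multiprocessing import Process, Queue


def send_integers(queue, n):
    # Producer: emit 0 .. n-1, then a sentinel marking the end of the stream.
    for i in range(n):
        queue.put(i)
    queue.put(None)


def receive_integers(queue):
    # Consumer: square each received integer and accumulate the total.
    total = 0
    while True:
        item = queue.get()
        if item is None:
            return total
        total += item ** 2


def producer_consumer(producer, consumer):
    # Simplified stand-in: run the producer in a child process, the consumer in
    # the parent, and return whatever the consumer returns.
    queue = Queue()
    child = Process(target=producer, args=(queue,))
    child.start()
    try:
        return consumer(queue)
    finally:
        child.join()


if __name__ == '__main__':
    assert (producer_consumer(partial(send_integers, n=2000), receive_integers)
            == sum(i ** 2 for i in range(2000)))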