Example 1
    def _iterate(self, stims, validation='strict', *args, **kwargs):
        # Process stims in fixed-size batches rather than one at a time.
        batches = batch_iterable(stims, self._batch_size)
        results = []
        for batch in progress_bar_wrapper(batches):
            res = self._transform(batch, *args, **kwargs)
            for i, stim in enumerate(batch):
                # Log the transformation and propagate context onto each result
                res[i] = _log_transformation(stim, res[i], self)
                self._propagate_context(stim, res[i])
            results.extend(res)
        return results
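
The `_iterate` method above assumes `batch_iterable` splits the stim sequence into fixed-size chunks. A minimal sketch of such a helper (an assumption for illustration, not necessarily pliers' actual implementation) could look like:

from itertools import islice

def batch_iterable(iterable, batch_size):
    # Yield successive lists of at most `batch_size` items from `iterable`.
    it = iter(iterable)
    while True:
        batch = list(islice(it, batch_size))
        if not batch:
            return
        yield batch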
Example 2
    def transform(self, stims, validation='strict', *args, **kwargs):

        if isinstance(stims, string_types):
            stims = load_stims(stims)

        # If stims is a CompoundStim and the Transformer is expecting a single
        # input type, extract all matching stims
        if isinstance(stims, CompoundStim) and not isinstance(
                self._input_type, tuple):
            stims = stims.get_stim(self._input_type, return_all=True)
            if not stims:
                raise ValueError("No stims of class %s found in the provided"
                                 "CompoundStim instance." % self._input_type)

        # If stims is an iterable, naively loop over elements, removing
        # invalid results if needed
        if isiterable(stims):
            iters = self._iterate(stims, *args, **kwargs)
            if config.drop_bad_extractor_results:
                iters = (i for i in iters if i is not None)
            return progress_bar_wrapper(iters, desc='Stim')

        # Validate stim, and then either pass it directly to the Transformer
        # or, if a conversion occurred, recurse.
        else:
            try:
                validated_stim = self._validate(stims)
            except TypeError as err:
                if validation == 'strict':
                    raise err
                elif validation == 'warn':
                    logging.warning(str(err))
                    return
                elif validation == 'loose':
                    return
            # If a conversion occurred during validation, we recurse
            if stims is not validated_stim:
                return self.transform(validated_stim, *args, **kwargs)
            else:
                result = self._transform(validated_stim, *args, **kwargs)
                result = _log_transformation(validated_stim, result, self)
                if isgenerator(result):
                    result = list(result)
                return result
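
The `except TypeError` block above implements three validation modes. As a standalone illustration (the helper name is hypothetical, not part of the library), the dispatch logic amounts to:

import logging

def handle_validation_error(err, validation='strict'):
    # 'strict' re-raises the error, 'warn' logs it and swallows it,
    # 'loose' silently ignores it; the last two yield a None result.
    if validation == 'strict':
        raise err
    if validation == 'warn':
        logging.warning(str(err))
    return None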
Example 3
    def _convert(self, video):
        if not hasattr(video, "frame_index"):
            frame_index = range(video.n_frames)
        else:
            frame_index = video.frame_index

        # Exactly one of `every`, `hertz`, or `top_n` is expected to be set;
        # otherwise `new_idx` below is never assigned.
        if self.every is not None:
            new_idx = range(video.n_frames)[::self.every]
        elif self.hertz is not None:
            interval = int(video.fps / self.hertz)
            new_idx = range(video.n_frames)[::interval]
        elif self.top_n is not None:
            import cv2
            # Rank frames by the summed absolute pixel difference from the
            # previous frame and keep the indices of the top_n largest changes.
            diffs = []
            for i, img in enumerate(video.frames):
                if i == 0:
                    last = img
                    continue
                diffs.append(sum(cv2.sumElems(cv2.absdiff(last, img))))
                last = img
            new_idx = sorted(range(len(diffs)),
                             key=lambda i: diffs[i],
                             reverse=True)[:self.top_n]

        frame_index = sorted(list(set(frame_index).intersection(new_idx)))

        # Construct new VideoFrameStim for each frame index
        onsets = [frame_num * (1. / video.fps) for frame_num in frame_index]
        frames = []
        for i, f in progress_bar_wrapper(enumerate(frame_index),
                                         desc='Video frame',
                                         total=len(frame_index)):
            if f != frame_index[-1]:
                dur = onsets[i+1] - onsets[i]
            else:
                dur = (video.n_frames / video.fps) - onsets[i]

            elem = VideoFrameStim(video=video, frame_num=f, duration=dur)
            frames.append(elem)

        return DerivedVideoStim(filename=video.filename, frames=frames,
                                frame_index=frame_index)
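
Stripped of the pliers object model, the three sampling strategies in `_convert` reduce to simple index arithmetic. A self-contained sketch (the helper name and the precomputed `diffs` list are illustrative assumptions, which also avoids the OpenCV dependency):

def select_frame_indices(n_frames, every=None, fps=None, hertz=None,
                         top_n=None, diffs=None):
    if every is not None:
        # Keep every `every`-th frame.
        return list(range(n_frames))[::every]
    if hertz is not None:
        # Keep roughly `hertz` frames per second of video.
        interval = int(fps / hertz)
        return list(range(n_frames))[::interval]
    if top_n is not None:
        # `diffs[i]` holds the difference between consecutive frames;
        # keep the indices of the `top_n` largest changes.
        return sorted(range(len(diffs)), key=lambda i: diffs[i],
                      reverse=True)[:top_n]
    return list(range(n_frames))

For example, select_frame_indices(100, fps=30, hertz=3) keeps every 10th frame index: [0, 10, 20, ..., 90].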
Example 4
    def _iterate(self, stims, validation='strict', *args, **kwargs):
        batches = batch_iterable(stims, self._batch_size)
        results = []
        for batch in progress_bar_wrapper(batches):
            use_cache = config.get_option('cache_transformers')
            target_inds = {}
            non_cached = []
            for stim in batch:
                key = hash((hash(self), hash(stim)))
                # If using the cache, only transform stims that aren't in the
                # cache and haven't already appeared in the batch
                if not (use_cache and (key in _cache or key in target_inds)):
                    target_inds[key] = len(non_cached)
                    non_cached.append(stim)

            # _transform will likely fail if given an empty list
            if len(non_cached) > 0:
                batch_results = self._transform(non_cached, *args, **kwargs)
            else:
                batch_results = []

            for i, stim in enumerate(batch):
                key = hash((hash(self), hash(stim)))
                # Use the target index to get the result from batch_results
                if key in target_inds:
                    result = batch_results[target_inds[key]]
                    result = _log_transformation(stim, result, self)
                    self._propagate_context(stim, result)
                    if use_cache:
                        if isgenerator(result):
                            result = list(result)
                        _cache[key] = result
                    results.append(result)
                # Otherwise, the result should be in the cache
                else:
                    results.append(_cache[key])
        return results
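
The caching logic above keys results on the combined hash of the transformer and the stim, so duplicates within a batch and stims seen in earlier calls are never recomputed. A standalone sketch of the same idea (all names here are illustrative, not pliers' internals):

_cache = {}

def transform_batch(transformer, batch, transform_fn, use_cache=True):
    target_inds, non_cached = {}, []
    for stim in batch:
        key = hash((hash(transformer), hash(stim)))
        # Only schedule stims that are neither cached nor already queued.
        if not (use_cache and (key in _cache or key in target_inds)):
            target_inds[key] = len(non_cached)
            non_cached.append(stim)

    batch_results = transform_fn(non_cached) if non_cached else []

    results = []
    for stim in batch:
        key = hash((hash(transformer), hash(stim)))
        if key in target_inds:
            result = batch_results[target_inds[key]]
            if use_cache:
                _cache[key] = result
            results.append(result)
        else:
            # Result was computed in an earlier call and cached.
            results.append(_cache[key])
    return results

Calling transform_batch('upper', ['a', 'b', 'a'], lambda xs: [x.upper() for x in xs]) runs the transform on ['a', 'b'] only and returns ['A', 'B', 'A'].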
Example 5
    def transform(self, stims, validation='strict', *args, **kwargs):
        ''' Executes the transformation on the passed stim(s).

        Args:
            stims (str, Stim, list): One or more stimuli to process. Must be
                one of:

                    - A string giving the path to a file that can be read in
                      as a Stim (e.g., a .txt file, .jpg image, etc.)
                    - A Stim instance of any type.
                    - An iterable of stims, where each element is either a
                      string or a Stim.

            validation (str): String specifying how validation errors should
                be handled. Must be one of:

                    - 'strict': Raise an exception on any validation error
                    - 'warn': Issue a warning for all validation errors
                    - 'loose': Silently ignore all validation errors

            args: Optional positional arguments to pass on to the internal
                _transform call.
            kwargs: Optional keyword arguments to pass on to the internal
                _transform call.
        '''

        if isinstance(stims, str):
            stims = load_stims(stims)

        # If stims is a CompoundStim and the Transformer is expecting a single
        # input type, extract all matching stims
        if isinstance(stims, CompoundStim) and not isinstance(
                self._input_type, tuple):
            stims = stims.get_stim(self._input_type, return_all=True)
            if not stims:
                raise ValueError("No stims of class %s found in the provided"
                                 "CompoundStim instance." % self._input_type)

        # If stims is an iterable, naively loop over elements, removing
        # invalid results if needed
        if isiterable(stims):
            iters = self._iterate(stims,
                                  validation=validation,
                                  *args,
                                  **kwargs)
            if config.get_option('drop_bad_extractor_results'):
                iters = (i for i in iters if i is not None)
            iters = progress_bar_wrapper(iters, desc='Stim')
            return set_iterable_type(iters)

        # Validate stim, and then either pass it directly to the Transformer
        # or, if a conversion occurred, recurse.
        else:
            try:
                validated_stim = self._validate(stims)
            except TypeError as err:
                if validation == 'strict':
                    raise err
                elif validation == 'warn':
                    logging.warning(str(err))
                    return
                elif validation == 'loose':
                    return
            # If a conversion occurred during validation, we recurse
            if stims is not validated_stim:
                return self.transform(validated_stim, *args, **kwargs)
            else:
                result = self._transform(validated_stim, *args, **kwargs)
                result = _log_transformation(validated_stim, result, self)
                if isgenerator(result):
                    result = list(result)
                self._propagate_context(validated_stim, result)
                return result
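
As a usage sketch, transform can be called on any concrete Transformer subclass (the class name and file paths below are placeholders, not actual pliers API):

# ext = SomeConcreteExtractor()
# result = ext.transform('image.jpg')                  # single stim -> single result
# results = list(ext.transform(['a.jpg', 'b.jpg'],     # iterable -> iterable of results
#                              validation='warn'))     # log, rather than raise, on bad stims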