Code example #1
0
File: transformers.py — project: andrewheusser/pliers
    def transform(self, stims, *args, **kwargs):
        ''' Executes the transformation on the passed stim(s).

        Args:
            stims (str, Stim, list): One or more stimuli to process. A string
                is treated as a file path and loaded into a Stim first.
            args: Optional positional arguments passed on to the internal
                _transform call.
            kwargs: Optional keyword arguments passed on to the internal
                _transform call.
        '''
        if isinstance(stims, string_types):
            stims = load_stims(stims)

        # If stims is a CompoundStim and the Transformer is expecting a single
        # input type, extract all matching stims
        if isinstance(stims, CompoundStim) and not isinstance(
                self._input_type, tuple):
            stims = stims.get_stim(self._input_type, return_all=True)
            if not stims:
                # Fix: a space was missing between the two string fragments,
                # which rendered as "providedCompoundStim" in the message.
                raise ValueError("No stims of class %s found in the provided "
                                 "CompoundStim instance." % self._input_type)

        # If stims is an iterable, naively loop over elements, removing
        # invalid results if needed
        if isiterable(stims):
            iters = self._iterate(stims, *args, **kwargs)
            if config.drop_bad_extractor_results:
                iters = (i for i in iters if i is not None)
            return progress_bar_wrapper(iters, desc='Stim')

        # Validate stim, and then either pass it directly to the Transformer
        # or, if a conversion occurred, recurse.
        else:
            validated_stim = self._validate(stims)
            # If a conversion occurred during validation, we recurse
            if stims is not validated_stim:
                return self.transform(validated_stim, *args, **kwargs)
            else:
                result = self._transform(validated_stim, *args, **kwargs)
                result = _log_transformation(validated_stim, result, self)
                # Generators are materialized so callers get a reusable value.
                if isgenerator(result):
                    result = list(result)
                return result
Code example #2
0
File: transformers.py — project: andrewheusser/pliers
 def wrapper(self, stim, *args, **kwargs):
     ''' Caching wrapper around transform().

     Results are memoized in the module-level _cache, keyed on the
     (transformer, stim) pair, so repeated transformations of the same
     stim are served from the cache.
     '''
     use_cache = config.cache_transformers and isinstance(stim, Stim)
     if use_cache:
         # Fix: key on hash(stim) rather than id(stim). id() values can be
         # recycled once an object is garbage-collected, which could return
         # a stale cached result for an unrelated new stim, and equal stims
         # would never share a cache entry. Later revisions of this wrapper
         # use hash(stim) for the same reason.
         key = hash((hash(self), hash(stim)))
         if key in _cache:
             return _cache[key]
     result = transform(self, stim, *args, **kwargs)
     if use_cache:
         # Materialize generators before caching, otherwise the first
         # consumer would exhaust the cached iterator.
         if isgenerator(result):
             result = list(result)
         _cache[key] = result
     return result
Code example #3
0
 def wrapper(self, stim, *args, **kwargs):
     ''' Memoizing wrapper around transform().

     Looks the (transformer, stim) pair up in the module-level _cache and
     returns the stored result on a hit; on a miss, runs the transformation
     and stores the (materialized) result for next time.
     '''
     cacheable = config.get_option('cache_transformers') \
         and isinstance(stim, (Stim, string_types))
     key = hash((hash(self), hash(stim))) if cacheable else None
     if cacheable and key in _cache:
         return _cache[key]
     result = transform(self, stim, *args, **kwargs)
     if cacheable:
         # Exhaustible generators are materialized before being stored.
         result = list(result) if isgenerator(result) else result
         _cache[key] = result
     return result
Code example #4
0
File: graph.py — project: undarmaa/pliers
    def run_node(self, node, stim):
        ''' Runs the Transformer at the given node on stim and recurses into
        the node's children, concatenating their results. Extractor nodes are
        terminal: their result is returned directly (as a list).
        '''
        # Allow nodes to be referenced by name.
        if isinstance(node, string_types):
            node = self.nodes[node]

        result = node.transformer.transform(stim)
        if isinstance(node.transformer, Extractor):
            return listify(result)

        # Non-extractor output feeds every child. A generator would be
        # consumed by the first child, so materialize it when more than one
        # child needs it.
        stim = result
        if isgenerator(stim) and len(node.children) > 1:
            stim = list(stim)
        child_results = [self.run_node(child, stim) for child in node.children]
        return list(chain(*child_results))
Code example #5
0
    def run_node(self, node, stim):
        ''' Executes the Transformer at a specific node.

        Args:
            node (str, Node): If a string, the name of the Node in the current
                Graph. Otherwise the Node instance to execute.
            stim (str, stim, list): Any valid input to the Transformer stored
                at the target node.
        '''
        # String arguments name a node in the current graph.
        node = self.nodes[node] if isinstance(node, string_types) else node

        result = node.transformer.transform(stim)
        # Leaves terminate the recursion; return their result as a list.
        if node.is_leaf():
            return listify(result)

        # Feed the result to every child. Generators are single-use, so
        # materialize when multiple children will consume the same output.
        stim = list(result) if (len(node.children) > 1
                                and isgenerator(result)) else result
        out = []
        for child in node.children:
            out.extend(self.run_node(child, stim))
        return out
Code example #6
0
File: graph.py — project: tyarkoni/featureX
    def run_node(self, node, stim):
        ''' Executes the Transformer at a specific node.

        Args:
            node (str, Node): If a string, the name of the Node in the current
                Graph. Otherwise the Node instance to execute.
            stim (str, stim, list): Any valid input to the Transformer stored
                at the target node.
        '''
        if isinstance(node, string_types):
            node = self.nodes[node]

        output = node.transformer.transform(stim)
        if node.is_leaf():
            return listify(output)

        # Cache generator output via list conversion when it must be shared
        # across more than one child, since iterating would exhaust it.
        if len(node.children) > 1 and isgenerator(output):
            output = list(output)
        nested = (self.run_node(child, output) for child in node.children)
        return list(chain.from_iterable(nested))
Code example #7
0
File: base.py — project: vishalbelsare/pliers
    def _iterate(self, stims, validation='strict', *args, **kwargs):
        ''' Transforms each stim in stims, batched by self._batch_size.

        Args:
            stims (iterable): Stims to transform.
            validation (str): Accepted for signature compatibility with
                transform(); not referenced in this body — presumably absorbed
                so it does not leak into kwargs. TODO confirm intent.
            args, kwargs: Forwarded to the internal _transform call.

        Returns:
            list: One result per input stim, in input order. When the
            'cache_transformers' config option is set, previously computed
            results are reused from the module-level _cache.
        '''
        batches = batch_iterable(stims, self._batch_size)
        results = []
        for batch in progress_bar_wrapper(batches):
            use_cache = config.get_option('cache_transformers')
            # Maps cache key -> index of the corresponding stim in non_cached.
            target_inds = {}
            non_cached = []
            for stim in batch:
                key = hash((hash(self), hash(stim)))
                # If using the cache, only transform stims that aren't in the
                # cache and haven't already appeared in the batch
                if not (use_cache and (key in _cache or key in target_inds)):
                    target_inds[key] = len(non_cached)
                    non_cached.append(stim)

            # _transform will likely fail if given an empty list
            if len(non_cached) > 0:
                batch_results = self._transform(non_cached, *args, **kwargs)
            else:
                batch_results = []

            for i, stim in enumerate(batch):
                key = hash((hash(self), hash(stim)))
                # Use the target index to get the result from batch_results
                if key in target_inds:
                    result = batch_results[target_inds[key]]
                    result = _log_transformation(stim, result, self)
                    self._propagate_context(stim, result)
                    if use_cache:
                        # Materialize generators before caching so the cached
                        # value can be consumed more than once.
                        if isgenerator(result):
                            result = list(result)
                        _cache[key] = result
                    results.append(result)
                # Otherwise, the result should be in the cache
                else:
                    results.append(_cache[key])
        return results
Code example #8
0
 def transform(self, stim, *args, **kwargs):
     ''' Runs the base transform() and materializes any generator result so
     extractors always hand back a concrete value. '''
     result = super(Extractor, self).transform(stim, *args, **kwargs)
     if isgenerator(result):
         result = list(result)
     return result
Code example #9
0
File: base.py — project: vishalbelsare/pliers
    def transform(self, stims, validation='strict', *args, **kwargs):
        ''' Executes the transformation on the passed stim(s).

        Args:
            stims (str, Stim, list): One or more stimuli to process. Must be
                one of:

                    - A string giving the path to a file that can be read in
                      as a Stim (e.g., a .txt file, .jpg image, etc.)
                    - A Stim instance of any type.
                    - An iterable of stims, where each element is either a
                      string or a Stim.

            validation (str): String specifying how validation errors should
                be handled. Must be one of:

                    - 'strict': Raise an exception on any validation error
                    - 'warn': Issue a warning for all validation errors
                    - 'loose': Silently ignore all validation errors

            args: Optional positional arguments to pass onto the internal
                _transform call.
            kwargs: Optional positional arguments to pass onto the internal
                _transform call.
        '''

        if isinstance(stims, str):
            stims = load_stims(stims)

        # If stims is a CompoundStim and the Transformer is expecting a single
        # input type, extract all matching stims
        if isinstance(stims, CompoundStim) and not isinstance(
                self._input_type, tuple):
            stims = stims.get_stim(self._input_type, return_all=True)
            if not stims:
                # Fix: a space was missing between the concatenated string
                # fragments, which rendered as "providedCompoundStim".
                raise ValueError("No stims of class %s found in the provided "
                                 "CompoundStim instance." % self._input_type)

        # If stims is an iterable, naively loop over elements, removing
        # invalid results if needed
        if isiterable(stims):
            iters = self._iterate(stims,
                                  validation=validation,
                                  *args,
                                  **kwargs)
            if config.get_option('drop_bad_extractor_results'):
                iters = (i for i in iters if i is not None)
            iters = progress_bar_wrapper(iters, desc='Stim')
            return set_iterable_type(iters)

        # Validate stim, and then either pass it directly to the Transformer
        # or, if a conversion occurred, recurse.
        else:
            try:
                validated_stim = self._validate(stims)
            except TypeError as err:
                if validation == 'strict':
                    raise err
                elif validation == 'warn':
                    logging.warning(str(err))
                    return
                elif validation == 'loose':
                    return
            # If a conversion occurred during validation, we recurse.
            # Fix: propagate the caller's validation mode instead of silently
            # resetting it to the default 'strict' on the recursive call.
            if stims is not validated_stim:
                return self.transform(validated_stim,
                                      validation=validation,
                                      *args,
                                      **kwargs)
            else:
                result = self._transform(validated_stim, *args, **kwargs)
                result = _log_transformation(validated_stim, result, self)
                # Generators are materialized so callers get a reusable value.
                if isgenerator(result):
                    result = list(result)
                self._propagate_context(validated_stim, result)
                return result
Code example #10
0
File: base.py — project: tyarkoni/featureX
 def transform(self, stim, *args, **kwargs):
     ''' Delegates to the parent transform() and converts any generator
     result to a list before returning it. '''
     out = super(Extractor, self).transform(stim, *args, **kwargs)
     return out if not isgenerator(out) else list(out)