def test_aggregate_skew():
    """Skewness over a 2-frame sliding window of the shared test data is zero everywhere."""
    # 11 aggregated frames x 2 features, all zeros (assert_array_equal ignores dtype).
    expected = numpy.zeros((2, 11))

    aggregated = Aggregator(
        win_length_frames=2,
        hop_length_frames=1,
        recipe=['skew']
    ).aggregate(
        data=FeatureContainer(data=data)
    )

    numpy.testing.assert_array_equal(expected, aggregated.data)
def test_aggregate_mean():
    """Running mean over a 2-frame window of the shared test data, per feature channel."""
    # First aggregated frame comes out as 0, the rest are means of consecutive
    # value pairs: 0.5, 1.5, ..., 9.5. Both feature channels are identical.
    channel = [0.0] + [frame + 0.5 for frame in range(10)]
    expected = numpy.array([channel, channel])

    aggregated = Aggregator(
        win_length_frames=2,
        hop_length_frames=1,
        recipe=['mean']
    ).aggregate(
        data=FeatureContainer(data=data)
    )

    numpy.testing.assert_array_equal(expected, aggregated.data)
def test_aggregate_flatten():
    """Flatten recipe stacks each 2-frame window into a single feature vector."""
    # For aggregated frame i the window holds frames (i-1, i), clipped at the
    # start; each source frame contributes both of its (identical) channels.
    previous = [0] + list(range(10))
    current = list(range(11))
    expected = numpy.array([previous, previous, current, current])

    aggregated = Aggregator(
        win_length_frames=2,
        hop_length_frames=1,
        recipe=['flatten']
    ).aggregate(
        data=FeatureContainer(data=data)
    )

    numpy.testing.assert_array_equal(expected, aggregated.data)
def test_save():
    """Aggregator settings must survive a save/load round trip unchanged.

    Saves a freshly constructed Aggregator to a temp file, loads it back,
    and verifies the loaded instance produces the same flattened output as
    test_aggregate_flatten expects.
    """
    data_target = numpy.array(
        [
            [0, 0, 0, 0],
            [0, 0, 1, 1],
            [1, 1, 2, 2],
            [2, 2, 3, 3],
            [3, 3, 4, 4],
            [4, 4, 5, 5],
            [5, 5, 6, 6],
            [6, 6, 7, 7],
            [7, 7, 8, 8],
            [8, 8, 9, 9],
            [9, 9, 10, 10],
        ]
    ).T

    container = FeatureContainer(data=data)

    # delete=False so the file survives until we explicitly unlink it below.
    tmp = tempfile.NamedTemporaryFile('r+', suffix='.cpickle', dir=tempfile.gettempdir(), delete=False)
    try:
        # Round trip: save the configured processor to disk, then load it back.
        agg = Aggregator(
            win_length_frames=2,
            hop_length_frames=1,
            recipe=['flatten']
        ).save(filename=tmp.name).load()

        data_aggregated = agg.aggregate(data=container)

        numpy.testing.assert_array_equal(data_target, data_aggregated.data)

    finally:
        try:
            tmp.close()
            os.unlink(tmp.name)
        except OSError:
            # Cleanup is best effort; the file may already be closed/removed.
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit.
            pass
class RepositoryAggregationProcessor(Processor):
    """Data aggregation processor for data repositories.

    Applies frame-wise aggregation (mean, std, cov, kurtosis, skew, flatten)
    to every container of every stream stored in a data repository.
    """
    input_type = ProcessingChainItemType.DATA_REPOSITORY  #: Input data type
    output_type = ProcessingChainItemType.DATA_REPOSITORY  #: Output data type

    def __init__(self, win_length_frames=10, hop_length_frames=1, recipe=None, **kwargs):
        """Constructor

        Parameters
        ----------
        recipe : list of dict or list of str
            Aggregation recipe, supported methods [mean, std, cov, kurtosis, skew, flatten].

        win_length_frames : int
            Window length in feature frames

        hop_length_frames : int
            Hop length in feature frames

        """
        # Accept the legacy parameter name 'aggregation_recipe' as a fallback.
        if recipe is None and kwargs.get('aggregation_recipe', None) is not None:
            recipe = kwargs.get('aggregation_recipe', None)

        # Inject initialization parameters back to kwargs
        kwargs.update({
            'win_length_frames': win_length_frames,
            'hop_length_frames': hop_length_frames,
            'recipe': recipe
        })

        # Run super init to call init of mixins too
        super(RepositoryAggregationProcessor, self).__init__(**kwargs)

        self.aggregator = Aggregator(**self.init_parameters)

    def process(self, data=None, store_processing_chain=False, **kwargs):
        """Process

        Parameters
        ----------
        data : DataRepository
            Data

        store_processing_chain : bool
            Store processing chain to data container returned
            Default value False

        Returns
        -------
        DataMatrix3DContainer

        """
        if isinstance(data, RepositoryContainer):
            # Label exists in data repository
            for label in data:
                for stream_id in data[label]:
                    # Do processing: aggregate each stream's container in place.
                    data.set_container(
                        label=label,
                        stream_id=stream_id,
                        container=self.aggregator.aggregate(
                            data=data.get_container(
                                label=label,
                                stream_id=stream_id
                            ),
                            **kwargs
                        )
                    )

            if store_processing_chain:
                # Get processing chain item
                processing_chain_item = self.get_processing_chain_item()

                # Update current processing parameters into chain item
                # (consistent with AggregationProcessor.process).
                processing_chain_item.update({
                    'process_parameters': kwargs
                })

                # Push chain item into processing chain stored in the container
                data.processing_chain.push_processor(**processing_chain_item)

            return data

        else:
            message = '{name}: Wrong input data type, type required [{input_type}].'.format(
                name=self.__class__.__name__,
                input_type=self.input_type)

            self.logger.exception(message)
            raise ValueError(message)
class AggregationProcessor(Processor):
    """Data aggregation processor.

    Wraps an Aggregator and exposes it through the processing-chain
    Processor interface for single data containers.
    """
    input_type = ProcessingChainItemType.DATA_CONTAINER  #: Input data type
    output_type = ProcessingChainItemType.DATA_CONTAINER  #: Output data type

    def __init__(self, win_length_frames=10, hop_length_frames=1, recipe=None, **kwargs):
        """Constructor

        Parameters
        ----------
        recipe : list of dict or list of str
            Aggregation recipe, supported methods [mean, std, cov, kurtosis, skew, flatten].

        win_length_frames : int
            Window length in feature frames

        hop_length_frames : int
            Hop length in feature frames

        """
        # Fall back to the legacy parameter name 'aggregation_recipe'.
        if recipe is None:
            recipe = kwargs.get('aggregation_recipe', None)

        # Feed the initialization parameters back into kwargs so that the
        # mixin chain sees them and they end up in self.init_parameters.
        kwargs['win_length_frames'] = win_length_frames
        kwargs['hop_length_frames'] = hop_length_frames
        kwargs['recipe'] = recipe

        # Run super init to call init of mixins too
        super(AggregationProcessor, self).__init__(**kwargs)

        self.aggregator = Aggregator(**self.init_parameters)

    def process(self, data=None, store_processing_chain=False, **kwargs):
        """Process

        Parameters
        ----------
        data : DataContainer
            Data to be aggregated

        store_processing_chain : bool
            Store processing chain to data container returned
            Default value False

        Returns
        -------
        DataContainer

        """
        from dcase_util.containers import ContainerMixin

        # Guard clause: reject anything that is not a container.
        if not isinstance(data, ContainerMixin):
            message = '{name}: Wrong input data type, type required [{input_type}].'.format(
                name=self.__class__.__name__,
                input_type=self.input_type)

            self.logger.exception(message)
            raise ValueError(message)

        # Do processing
        aggregated = self.aggregator.aggregate(data=data, **kwargs)

        if store_processing_chain:
            # Record this processing step, including the runtime parameters,
            # in the chain stored inside the returned container.
            chain_item = self.get_processing_chain_item()
            chain_item['process_parameters'] = kwargs
            aggregated.processing_chain.push_processor(**chain_item)

        return aggregated
def test_aggregate():
    """Mean aggregation across window/hop/center/padding combinations."""

    def run_case(expected_frames, **aggregator_args):
        # Build an Aggregator for this parameter set, run it over the shared
        # test data and compare against the expected (frames x features).T matrix.
        expected = numpy.array(expected_frames).T
        result = Aggregator(
            recipe=['mean'],
            **aggregator_args
        ).aggregate(
            data=FeatureContainer(data=data)
        )
        numpy.testing.assert_array_equal(expected, result.data)

    # Sliding window of 2 frames, hop 1, no centering or padding.
    run_case(
        [[0.5, 0.5], [1.5, 1.5], [2.5, 2.5], [3.5, 3.5], [4.5, 4.5],
         [5.5, 5.5], [6.5, 6.5], [7.5, 7.5], [8.5, 8.5], [9.5, 9.5]],
        win_length_frames=2, hop_length_frames=1, center=False, padding=False
    )

    # A single window covering all 11 frames.
    run_case(
        [[5.0, 5.0]],
        win_length_frames=11, hop_length_frames=11, center=False, padding=False
    )

    # Non-overlapping windows of 4 frames; the incomplete tail is dropped.
    run_case(
        [[1.5, 1.5], [5.5, 5.5]],
        win_length_frames=4, hop_length_frames=4, center=False, padding=False
    )

    # Same windows but centered, which shifts the resulting means.
    run_case(
        [[3.5, 3.5], [7.5, 7.5]],
        win_length_frames=4, hop_length_frames=4, center=True, padding=False
    )

    # Centering plus padding yields an extra partially-padded first window.
    run_case(
        [[0.25, 0.25], [3.5, 3.5], [7.5, 7.5]],
        win_length_frames=4, hop_length_frames=4, center=True, padding=True
    )