def test_get_refinery(self):
     ppln1 = ppln.Pipeline(factory=self.factory, config=self.config)
     print '**6600** ppln1 =', ppln1
     ppln2 = ppln.Pipeline(factory=self.factory, config=self.config)
     print '**6600** ppln2 =', ppln2
     ppln3 = ppln.Pipeline(factory=self.factory, config=self.config)
     print '**6600** ppln3 =', ppln3
     ppln3.pipeline = ppln2
     ppln2.pipeline = ppln1
     self.assertEquals(ppln3.refinery, ppln1)
Example #2
0
 def test_essential_and_optional_keys(self):
     """Constructor kwargs override config key defaults.

     keys1 entries take their defaults (a1 -> 'joe') unless supplied;
     keys2 values are type-converted (0xFF -> 255, 12.5 -> float,
     false -> False).
     """
     config = '''
     [--main--]
     ftype = demo 
     description = TO-DO: docstring
     keys1 = a1:joe, b2, c3, d4
     keys2 = w:10, x:0xFF, y:12.5, z:false, ghj:asdf, foo:none, baz:none
     
     [--route--]
     pass_through
     '''
     pipeline5 = ppln.Pipeline(factory=self.factory,
                               config=config,
                               baz='jim',
                               b2='bill',
                               c3=789,
                               d4='jane')
     self.assertEqual(pipeline5.a1, 'joe')
     self.assertEqual(pipeline5.b2, 'bill')
     self.assertEqual(pipeline5.c3, 789)
     self.assertEqual(pipeline5.d4, 'jane')
     self.assertEqual(pipeline5.w, 10)
     self.assertEqual(pipeline5.x, 255)
     self.assertEqual(pipeline5.y, 12.5)
     self.assertEqual(pipeline5.z, False)
     self.assertEqual(pipeline5.ghj, 'asdf')
Example #3
0
    def test_dynamic_filter_in_static_pipeline2(self):
        """Metaclass-dynamic batch filter picks up %BATCH_SIZE at run time.

        Batch is set to be dynamic in config, using metaclass.
        Possibly not so useful, since it is not reversible.
        """
        config4 = '''\
        [--main--]
        ftype = demo_static_pipeline4
        description = Static pipeline with metaclass dynamic filter
        keys = foo, bar:55
        
        [py_set_param]
        print '**16140** Setting BATCH_SIZE to 2048'
        BATCH_SIZE = 2048
        
        [batch]
        dynamic = meta
        size = %BATCH_SIZE

        [--route--]
        py_set_param >>>
        batch
        '''
        pipeline26 = ppln.Pipeline(factory=self.factory,
                                   config=config4,
                                   foo='cyrus')
        self.assertEqual(pipeline26.foo, 'cyrus')
        self.assertEqual(pipeline26.bar, 55)
        batch_filter = pipeline26.getf('batch')
        # Dynamic value not yet set
        self.assertEqual(batch_filter.size, dfb.k_unset)
        # Send packet to activate py_set_param
        packet = dfb.DataPacket('lo')
        pipeline26.send(packet)
        self.assertEqual(batch_filter.size, 2048)
Example #4
0
 def test_variable_batch1(self):
     # Using Python embedded environment, change batch size from 1 to 6
     config = '''\
     [--main--]  
     ftype = variable_batch
     description = Test variable batch passing
 
     [py_init_batch_size]
     BATCH_SIZE = 1
     
     [py_set_batch_size]
     BATCH_SIZE += 1
     
     [batch]
     dynamic = true
     
     [--route--]
     py_init_batch_size >>>
     batch:%BATCH_SIZE >>>
     py_set_batch_size >>>
     sink
     '''
     ppln1 = ppln.Pipeline(factory=self.factory, config=config)
     source = 'Iamnotveryquickeating'
     packet1 = dfb.DataPacket(source)
     ppln1.send(packet1)
     ppln1.shut_down()
     print '**13120** results = ', '|'.join(ppln1.getf('sink').all_data)
     self.assertEquals('|'.join(ppln1.getf('sink').all_data),
                       'I|am|not|very|quick|eating')
     print '**13150** finished test_variable_batch1'
Example #5
0
    def test_dynamic_filter_in_static_pipeline1(self):
        """Reversible dynamic batch filter reads %BATCH_SIZE once activated.

        Batch is set to be dynamic in config.
        """
        config3 = '''\
        [--main--]
        ftype = demo_static_pipeline3
        description = Static pipeline with reversible dynamic filter
        keys = foo, bar:43
        
        [py_set_param]
        print '**16130** Setting BATCH_SIZE to 256'
        BATCH_SIZE = 256
        
        [batch]
        dynamic = true
        size = %BATCH_SIZE

        [--route--]
        py_set_param >>>
        batch
        '''
        pipeline25 = ppln.Pipeline(factory=self.factory,
                                   config=config3,
                                   foo='jim')
        self.assertEqual(pipeline25.foo, 'jim')
        self.assertEqual(pipeline25.bar, 43)
        batch_filter = pipeline25.getf('batch')
        # Dynamic value not yet set
        self.assertEqual(batch_filter.size, dfb.k_unset)
        # Send packet to activate py_set_param
        packet = dfb.DataPacket('hi')
        pipeline25.send(packet)
        self.assertEqual(batch_filter.size, 256)
Example #6
0
 def setUp(self):
     """Fixture: a demo factory, a keyed pipeline, and two packets."""
     self.factory = ff.DemoFilterFactory()
     self.packet1 = dfb.DataPacket(height=1)
     self.packet2 = dfb.DataPacket(height=2)
     # Pipeline is given its foo/bar key values at construction time.
     self.pipeline3 = ppln.Pipeline(
         factory=self.factory, config=self.config, foo=0, bar=45)
Example #7
0
 def test_static_pipeline(self):
     """A non-dynamic pipeline still allows key attributes to be reset."""
     pipeline21 = ppln.Pipeline(factory=self.factory,
                                config=self.config1,
                                foo='jim',
                                dynamic=False)
     self.assertEqual(pipeline21.foo, 'jim')
     # bar = 27 presumably defaulted in self.config1 -- not visible here
     self.assertEqual(pipeline21.bar, 27)
     pipeline21.bar = 456
     self.assertEqual(pipeline21.bar, 456)
Example #8
0
 def setUp(self):
     """Fixture: a demo factory, a pipeline and six numbered packets."""
     self.factory = ff.DemoFilterFactory()
     self.pipeline1 = ppln.Pipeline(factory=self.factory,
                                    config=self.config)
     # NB: packet6 has height 9, not 6.
     for seq, height in enumerate([1, 2, 3, 4, 5, 9], 1):
         setattr(self, 'packet%d' % seq, dfb.DataPacket(height=height))
Example #9
0
 def test_set_param_in_route_no_config(self):
     """A parameter given only in the route (batch:33) is applied."""
     config = '''
     [--main--]
     ftype = testing
     description = TO-DO: docstring
 
     [--route--]
     batch:33 >>>
     sink
     '''
     pipeline1 = ppln.Pipeline(factory=self.factory, config=config)
     self.assertEqual(pipeline1.getf('batch').size, 33)
Example #10
0
 def test_pipeline_wrapped_tank_queue(self):
     """A tank queue of size 5 holds packets until a sixth displaces one."""
     pipeline = ppln.Pipeline(factory=self.factory,
                              config=self.config_normal)
     tank_queue = pipeline.getf('tank_queue')
     sink = pipeline.getf('sink')
     self.assertEqual(tank_queue.tank_size, 5)
     first_five = [self.packet1, self.packet2, self.packet3,
                   self.packet4, self.packet5]
     pipeline.send(*first_five)
     # Queue exactly full: nothing has reached the sink yet.
     self.assertEqual(len(sink.results), 0)
     pipeline.send(self.packet6)
     # One packet displaced; FIFO, so packet1 comes out first.
     self.assertEqual(len(sink.results), 1)
     self.assertEqual(sink.results[0], self.packet1)
Example #11
0
 def test_count_packet(self):
     """seq_packet's subframe_number counter starts at zero."""
     ##        return # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<  subframe_number ?? TO-DO
     config = '''
     [--main--]
     ftype = testing
     description = TO-DO: docstring
     
     [--route--]
     seq_packet:subframe_number >>>
     sink
     '''
     pipeline3 = ppln.Pipeline(factory=self.factory, config=config)
     id_giver = pipeline3.getf('seq_packet')
     self.assertEqual(id_giver.subframe_number, 0)
Example #12
0
 def test_type_set_in_config_overrides_name_interp(self):
     """An explicit ftype in a filter section beats the name's prefix."""
     config = '''
     [--main--]
     ftype = testing
     description = TO-DO: docstring
     
     [batch_which_isnt_a_batch]
     ftype = sink
     
     [--route--]
     batch_which_isnt_a_batch
     '''
     pipeline32 = ppln.Pipeline(factory=self.factory, config=config)
     # The filter is a sink despite its "batch_..." name.
     self.assertEqual(pipeline32.first_filter.ftype, 'sink')
Example #13
0
    def test_set_param_in_route_twice_same(self):
        """Setting batch:55 twice in the route with the same value is OK."""
        config = '''
        [--main--]
        ftype = testing
        description = TO-DO: docstring

        [--route--]
        distill_header:100 >>>
            (batch:55)
        sink >>>
        batch:55
        '''
        pipeline1 = ppln.Pipeline(factory=self.factory, config=config)
        self.assertEqual(pipeline1.getf('batch').size, 55)
Example #14
0
    def test_dynamic_filter_in_static_pipeline3(self):
        """Two reversibly dynamic filters each read their own %PARAM.

        Batch is set to be reversibly dynamic in config; py_set_param
        supplies different sizes for the two distill_header filters.
        """
        config3 = '''\
        [--main--]
        ftype = demo_static_pipeline3
        description = Static pipeline with reversible dynamic filter
        keys = foo, bar:43
        #dynamic = true
        
        [py_set_param]
        print '**16130** Setting BATCH_SIZE to 256'
        BATCH_SIZE_SOMETHING = 256
        BATCH_SIZE_OTHER = 128
        
        
        [distill_something]
        ftype = distill_header
        dynamic = true
        
        [distill_other]
        ftype = distill_header
        dynamic = true


        [--route--]
        py_set_param >>>
        distill_something:%BATCH_SIZE_SOMETHING >>>
            (pass_through)
        distill_other:%BATCH_SIZE_OTHER >>>
            (pass_through)
        sink
        '''
        pipeline25 = ppln.Pipeline(factory=self.factory,
                                   config=config3,
                                   foo='jim')
        self.assertEqual(pipeline25.foo, 'jim')
        self.assertEqual(pipeline25.bar, 43)
        something_filter = pipeline25.getf('distill_something')
        other_filter = pipeline25.getf('distill_other')
        # Dynamic value not yet set
        self.assertEqual(something_filter.header_size, dfb.k_unset)
        # Send packet to activate py_set_param
        packet = dfb.DataPacket('hi')
        pipeline25.send(packet)
        self.assertEqual(something_filter.header_size, 256)
        self.assertEqual(other_filter.header_size, 128)
Example #15
0
    def test_set_param_in_route_once_with_extra_key_2(self):
        """The valued occurrence of a route filter wins, whatever the order.

        "batch" occurs twice in the route, but only once with a value.
        The one with a value should be taken, in whichever order they
        come.
        """
        config = '''
        [--main--]
        ftype = testing
        description = TO-DO: docstring

        [--route--]
        distill_header:100 >>>
            (batch:77)
        sink >>>
        batch
        '''
        pipeline1 = ppln.Pipeline(factory=self.factory, config=config)
        self.assertEqual(pipeline1.getf('batch').size, 77)
Example #16
0
 def test_pipeline_with_keys(self):
     """size=2 passed as a pipeline key shrinks the tank queue."""
     pipeline = ppln.Pipeline(factory=self.factory,
                              config=self.config_keys,
                              size=2)
     tank_queue = pipeline.getf('tank_queue')
     sink = pipeline.getf('sink')
     self.assertEqual(tank_queue.tank_size, 2)
     pipeline.send(self.packet1, self.packet2, self.packet3, self.packet4,
                   self.packet5)
     # Queue holds two of the five packets; three have passed through.
     self.assertEqual(len(sink.results), 3)
     pipeline.send(self.packet6)
     self.assertEqual(len(sink.results), 4)  # sink now has 4 packets
     # FIFO: the packets emerge in their original order.
     expected = [self.packet1, self.packet2, self.packet3, self.packet4]
     for position, packet in enumerate(expected):
         self.assertEqual(sink.results[position], packet)
Example #17
0
 def test_pipeline_reduce_expand(self):
     """Shrinking the tank queue flushes it; growing it buffers again."""
     pipeline = ppln.Pipeline(factory=self.factory,
                              config=self.config_expand_reduce)
     queue = pipeline.getf('tank_queue')
     sink = pipeline.getf('sink')
     self.assertEqual(queue.tank_size, 5)
     pipeline.send(self.packet1, self.packet2, self.packet3)
     self.assertEqual(len(sink.results), 0)
     # Reduce to zero: the held packets must all be flushed downstream.
     queue.tank_size = 0
     self.assertEqual(len(sink.results), 3)
     # With size 0 the queue stores nothing, so the packet passes through.
     pipeline.send(self.packet4)
     self.assertEqual(len(sink.results), 4)
     # Expand again: new packets are buffered and none reach the sink.
     queue.tank_size = 10
     self.assertEqual(queue.spare_capacity, 0)
     pipeline.send(self.packet5, self.packet6)
     self.assertEqual(len(sink.results), 4)
Example #18
0
    def test_set_param_in_route_and_config_same(self):
        """Setting the same parameter value in both config and route is OK.

        Previously setting a parameter in both config and route was an
        error, but there are situations when it may be necessary. We have
        to allow the same value because e.g. one "batch:40" filter may be
        parsed twice, as a from and a to filter. It doesn't really matter,
        as long as the values are the same.
        """
        config = '''
        [--main--]
        ftype = testing
        description = TO-DO: docstring

        [batch]
        size = 44

        [--route--]
        batch:44 >>>
        sink
        '''
        pipeline1 = ppln.Pipeline(factory=self.factory, config=config)
        self.assertEqual(pipeline1.getf('batch').size, 44)
Example #19
0
 def test_variable_batch2(self):
     # Using Python embedded environment, read batch size from data
     config = '''\
     [--main--]  
     ftype = variable_batch
     description = Test variable batch passing
     ##dynamic = true
 
     [py_init_batch_size]
     BATCH_SIZE = 2
     READING_SIZE = True
     
     [py_read_batch_size]
     if READING_SIZE:
         BATCH_SIZE = int(packet.data)
         READING_SIZE = False
         packet.fork_dest = 'branch'
     else:
         init_batch_size()
         
     [batch]
     dynamic = true
     size = %BATCH_SIZE
     
     [--route--]
     py_init_batch_size >>>
     #batch:%BATCH_SIZE >>>
     batch >>>
     py_read_batch_size >>>
     sink
     '''
     ppln2 = ppln.Pipeline(factory=self.factory, config=config)
     source = 'Mary had a little lamb; its fleece was white as snow.'
     source2 = ''.join(('%2.2d' % len(x) + x) for x in source.split())
     print '**13125** source2 =', source2
     packet2 = dfb.DataPacket(source2)
     ppln2.send(packet2)
     ppln2.shut_down()
     print '**13120** results = ', '|'.join(ppln2.getf('sink').all_data)
     self.assertEquals(' '.join(ppln2.getf('sink').all_data), source)
     print '**13150** finished test_variable_batch2'
    def test_filter1(self):
        import filterpype.filter_factory as ff
        factory = ff.DemoFilterFactory()

        config = '''
        [--main--]
        ftype = demo
        description = TO-DO: docstring
        
        [--route--]
        batch:3 >>>
        reverse_string >>>
        sink
        '''

        pipeline1 = ppln.Pipeline(factory=factory, config=config)
        # Manual connection for now
        pgf = pipeline1.get_filter
        pipeline1.first_filter = pgf('batch')
        ##        pgf('batch').next_filter = pgf('reverse_string')
        ##        pgf('reverse_string').next_filter = pgf('sink')

        for string in self.source_data:
            pipeline1.send(dfb.DataPacket(string))
        print '**1700** about to close'
        pipeline1.shut_down()
        for part in pgf('sink').results:
            print '**2482**  ', part.data


##        print pgf('sink'].all_data
        self.assertEquals(pgf('sink').results[-8].data, '321')
        self.assertEquals(pgf('sink').results[-7].data, '654')
        self.assertEquals(pgf('sink').results[-6].data, '496')
        self.assertEquals(pgf('sink').results[-5].data, '893')
        self.assertEquals(pgf('sink').results[-4].data, '167')
        self.assertEquals(pgf('sink').results[-3].data, '010')
        self.assertEquals(pgf('sink').results[-2].data, '987')
        self.assertEquals(pgf('sink').results[-1].data, 'X')