Code example #1
class AsyncResultWaiter(object):
    """
    Class that makes waiting for an async result notification easy.
    Creates a subscriber for a generated token name, which can be handed to the async provider.
    The provider then publishes the result to the token name when ready.
    The caller can wait for the result or timeout.
    """
    def __init__(self, process=None):
        self.process = process

        self.async_res = AsyncResult()
        self.wait_name = "asyncresult_" + create_simple_unique_id()
        if self.process:
            self.wait_name = self.wait_name + "_" + self.process.id
        # TODO: Use same mechanism as pooled RPC response endpoint (without the request)
        self.wait_sub = Subscriber(from_name=self.wait_name,
                                   callback=self._result_callback,
                                   auto_delete=True)
        self.activated = False

    def activate(self):
        if self.activated:
            raise BadRequest("Already active")
        self.listen_gl = spawn(self.wait_sub.listen)  # This initializes and activates the listener
        self.wait_sub.get_ready_event().wait(timeout=1)
        self.activated = True

        return self.wait_name

    def _result_callback(self, msg, headers):
        log.debug("AsyncResultWaiter: received message")
        self.async_res.set(msg)

    def await(self, timeout=None, request_id=None):
        try:
            result = self.async_res.get(timeout=timeout)
            if request_id and isinstance(result, AsyncResultMsg) and result.request_id != request_id:
                log.warn("Received result for a different request: %s", result)
                result = None

        except gevent.Timeout:
            raise Timeout("Timeout in AsyncResultWaiter name={}".format(self.wait_name))
        finally:
            self.wait_sub.deactivate()
            self.wait_sub.close()
            self.listen_gl.join(timeout=1)
            self.activated = False

        return result
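A minimal usage sketch for the waiter above (the provider call and the AsyncResultMsg fields are assumptions inferred from the docstring and the request_id check in await):

# Caller side: create the waiter and hand the generated token name to the async provider.
waiter = AsyncResultWaiter()
token = waiter.activate()  # returns the generated wait_name
provider.start_long_operation(reply_to=token)  # hypothetical provider call

# Provider side, when the work is ready: publish the result to the token name.
pub = Publisher(to_name=token)
pub.publish(AsyncResultMsg(request_id="req-1", result=42))  # assumed message fields

# Caller side: block until the result arrives, or let Timeout propagate.
result = waiter.await(timeout=10, request_id="req-1")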
Code example #2
    def get_realtime_visualization_data(self, query_token=''):
        """This operation returns a block of visualization data for displaying data product in real time. This operation requires a
        user specific token which was provided from a previous request to the init_realtime_visualization operation.

        @param query_token    str
        @retval datatable    str
        @throws NotFound    Throws if specified query_token or its visualization product does not exist
        """
        log.debug("get_realtime_visualization_data Vis worker: %s", self.id)

        if not query_token:
            raise BadRequest("The query_token parameter is missing")

        subscriber = None
        try:
            # Taking advantage of idempotency
            queue_name = '-'.join([USER_VISUALIZATION_QUEUE, query_token])
            xq = self.container.ex_manager.create_xn_queue(queue_name)

            subscriber = Subscriber(from_name=xq)
            subscriber.initialize()

        except Exception:
            # Close the subscriber if it was created before the failure
            if subscriber:
                subscriber.close()

            raise BadRequest("Could not subscribe to the real-time queue")

        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()

        subscriber.close()

        # Different message types are processed differently; the return value
        # is determined by the viz product type
        ret_val = self._process_visualization_message(msgs)

        return ret_val
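A hedged client-side sketch of how this operation would typically be polled (init_realtime_visualization and the vis_client service reference are assumptions based on the docstring):

# Obtain a user-specific token once, then poll for blocks of visualization data.
query_token = vis_client.init_realtime_visualization(data_product_id=data_product_id)
while not done:
    datatable = vis_client.get_realtime_visualization_data(query_token=query_token)
    handle_data(datatable)  # hypothetical consumer of the returned block
    gevent.sleep(2.0)       # each call drains whatever is queued (get_all_msgs, 2 s timeout)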
Code example #3
    def test_visualization_queue(self):

        # The list of data product streams to monitor
        data_product_stream_ids = list()

        # Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        user_queue_name = USER_VISUALIZATION_QUEUE

        xq = self.container.ex_manager.create_xn_queue(user_queue_name)

        salinity_subscription_id = self.pubsubclient.create_subscription(
            stream_ids=data_product_stream_ids,
            exchange_name=user_queue_name,
            name="user visualization queue")

        subscriber = Subscriber(from_name=xq)
        subscriber.initialize()

        # after the queue has been created it is safe to activate the subscription
        self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id)

        ctd_sim_pid = self.start_simple_input_stream_process(ctd_stream_id)
        gevent.sleep(10.0)  # Let the simulator publish for a while - exact count doesn't matter

        msg_count, _ = xq.get_stats()
        log.info('Messages in user queue 1: %s', msg_count)

        # Pull all pending messages, ack them, and validate each one
        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()
            self.validate_messages(msg)

        # Should be zero after pulling all of the messages
        msg_count, _ = xq.get_stats()
        log.info('Messages in user queue 2: %s', msg_count)


        # Let messages continue to accumulate while nothing is consuming
        gevent.sleep(5.0)

        # Turn the simulator off only now - more representative of an always-on stream of data
        self.process_dispatcher.cancel_process(ctd_sim_pid)  # enough data collected

        # Should see more messages in the queue
        msg_count, _ = xq.get_stats()
        log.info('Messages in user queue 3: %s', msg_count)

        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()
            self.validate_messages(msg)

        # Should be zero after pulling all of the messages
        msg_count, _ = xq.get_stats()
        log.info('Messages in user queue 4: %s', msg_count)

        subscriber.close()
        self.container.ex_manager.delete_xn(xq)
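The broker-side bookkeeping this test leans on, isolated into a sketch (container stands in for the test's self.container; 'some_queue' is a placeholder name):

# create_xn_queue is idempotent: it declares the named queue if necessary and returns a handle.
xq = container.ex_manager.create_xn_queue('some_queue')
# get_stats reports broker-side counts as a (message_count, consumer_count) tuple.
msg_count, consumer_count = xq.get_stats()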
Code example #4
File: test_exchange.py  Project: j2project/pyon
    def test_consume_one_message_at_a_time(self):
        # see also pyon.net.test.test_channel:TestChannelInt.test_consume_one_message_at_a_time

        pub3 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.3'))
        pub5 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.5'))

        #
        # SETUP COMPLETE, BEGIN TESTING OF EXCHANGE OBJECTS
        #

        xq = self.container.ex_manager.create_xn_queue('random_queue')
        self.addCleanup(xq.delete)

        # recv'd messages from the subscriber (unused in this variant; messages are pulled via get_one_msg)
        self.recv_queue = Queue()

        def cb(m, h):
            raise StandardError("Subscriber callback should never be invoked - messages are pulled explicitly")

        sub = Subscriber(from_name=xq, callback=cb)
        sub.initialize()

        # publish 10 messages - we're not bound yet, so they'll just disappear
        for x in xrange(10):
            pub3.publish("3,%s" % str(x))

        # allow time for routing
        time.sleep(2)

        # no messages yet
        self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

        # now, we'll bind the xq
        xq.bind('routed.3')

        # even though we are consuming, there are no messages - the previously published ones all disappeared
        self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

        # publish those messages again
        for x in xrange(10):
            pub3.publish("3,%s" % str(x))

        # allow time for routing
        time.sleep(2)

        # NOW we have messages!
        for x in xrange(10):
            mo = sub.get_one_msg(timeout=10)
            self.assertEquals(mo.body, "3,%s" % str(x))
            mo.ack()

        # we've cleared it all
        self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

        # bind a wildcard and publish on both
        xq.bind('routed.*')

        for x in xrange(10):
            time.sleep(0.3)
            pub3.publish("3,%s" % str(x))
            time.sleep(0.3)
            pub5.publish("5,%s" % str(x))

        # allow time for routing
        time.sleep(2)

        # should get all 20, interleaved
        for x in xrange(10):
            mo = sub.get_one_msg(timeout=1)
            self.assertEquals(mo.body, "3,%s" % str(x))
            mo.ack()

            mo = sub.get_one_msg(timeout=1)
            self.assertEquals(mo.body, "5,%s" % str(x))
            mo.ack()

        # add the 'routed.5' binding, remove all other bindings
        xq.bind('routed.5')
        xq.unbind('routed.3')
        xq.unbind('routed.*')

        # try publishing to 3, shouldn't arrive anymore
        pub3.publish("3")

        self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

        # let's turn off the consumer and let things build up a bit
        sub._chan.stop_consume()

        for x in xrange(10):
            pub5.publish("5,%s" % str(x))

        # allow time for routing
        time.sleep(2)

        # 10 messages in the queue, no consumers
        self.assertTupleEqual((10, 0), sub._chan.get_stats())

        # drain queue
        sub._chan.start_consume()

        for x in xrange(10):
            mo = sub.get_one_msg(timeout=1)
            mo.ack()

        sub.close()
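The routing behavior this test exercises, reduced to a sketch (container stands in for the test's self.container; the queue and routing-key names are placeholders):

# A queue receives nothing until it is bound to a routing key.
xq = container.ex_manager.create_xn_queue('demo_queue')
xq.bind('routed.3')

pub = Publisher(to_name=(container.ex_manager.default_xs.exchange, 'routed.3'))
sub = Subscriber(from_name=xq, callback=lambda m, h: None)
sub.initialize()

pub.publish("hello")
mo = sub.get_one_msg(timeout=5)  # pull exactly one delivery
mo.ack()                         # ack removes it from the queue
sub.close()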
Code example #5
File: transform.py  Project: oldpatricka/pyon
class TransformBenchTesting(TransformDataProcess):
    """
    Easiest way to run:
    from pyon.util.containers import DotDict
    tbt=cc.proc_manager._create_service_instance('55', 'tbt', 'pyon.ion.transform', 'TransformBenchTesting', DotDict({'process':{'name':'tbt', 'transform_id':'55'}}))
    tbt.init()
    tbt.start()
    """
    transform_number = 0
    message_length = 0

    def __init__(self):
        super(TransformBenchTesting, self).__init__()
        self.count = 0
        TransformBenchTesting.transform_number += 1

    def perf(self):

        with open('/tmp/pyon_performance.dat', 'a') as f:
            then = time.time()
            ocount = self.count
            while True:
                gevent.sleep(2.)
                now = time.time()
                count = self.count
                delta_t = now - then
                delta_c = count - ocount

                f.write(
                    '%s|%s\t%s\t%s\t%3.3f\n' %
                    (get_sys_name(), time.strftime("%H:%M:%S", time.gmtime()),
                     TransformBenchTesting.message_length,
                     TransformBenchTesting.transform_number,
                     float(delta_c) / delta_t))
                then = now
                ocount = count
                f.flush()

    @staticmethod
    def launch_benchmark(transform_number=1, primer=1, message_length=4):
        import gevent
        from gevent.greenlet import Greenlet
        from pyon.util.containers import DotDict
        from pyon.net.transport import NameTrio
        from pyon.net.endpoint import Publisher
        import uuid
        num = transform_number
        msg_len = message_length
        transforms = list()
        pids = 1
        TransformBenchTesting.message_length = message_length
        cc = Container.instance
        pub = Publisher(to_name=NameTrio(get_sys_name(),
                                         str(uuid.uuid4())[0:6]))
        for i in xrange(num):
            tbt = cc.proc_manager._create_service_instance(
                str(pids), 'tbt', 'prototype.transforms.linear',
                'TransformInPlace',
                DotDict({
                    'process': {
                        'name': 'tbt%d' % pids,
                        'transform_id': pids
                    }
                }))
            tbt.init()
            tbt.start()
            gevent.sleep(0.2)
            for _ in xrange(primer):
                pub.publish(list(xrange(msg_len)))
            g = Greenlet(tbt.perf)
            g.start()
            transforms.append(tbt)
            pids += 1

    def on_start(self):
        TransformDataProcess.on_start(self)

        # set up subscriber to *
        self._bt_sub = Subscriber(callback=lambda m, h: self.call_process(m),
                                  from_name=NameTrio(get_sys_name(),
                                                     'bench_queue', '*'))

        # spawn listener
        self._sub_gl = spawn(self._bt_sub.listen)

        # set up publisher to anything!
        self._bt_pub = Publisher(to_name=NameTrio(get_sys_name(),
                                                  str(uuid.uuid4())[0:6]))

    def publish(self, msg):
        self._bt_pub.publish(msg)
        self.count += 1

    def _stop_listener(self):
        self._bt_sub.close()
        self._sub_gl.join(timeout=2)
        self._sub_gl.kill()

    def on_stop(self):
        TransformDataProcess.on_stop(self)
        self._stop_listener()

    def on_quit(self):
        TransformDataProcess.on_quit(self)
        self._stop_listener()
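Following the docstring above, a sketch of driving the benchmark from a running container shell (Container.instance is assumed to already be set):

# Spawn 4 transform instances, prime each with one message of length 128,
# and start the per-transform perf() loops.
TransformBenchTesting.launch_benchmark(transform_number=4, primer=1, message_length=128)
# Throughput samples are appended to /tmp/pyon_performance.dat every 2 seconds.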
Code example #6
File: test_exchange.py  Project: oldpatricka/pyon
    def test_consume_one_message_at_a_time(self):
        # see also pyon.net.test.test_channel:TestChannelInt.test_consume_one_message_at_a_time

        pub3 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.3'))
        pub5 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.5'))

        #
        # SETUP COMPLETE, BEGIN TESTING OF EXCHANGE OBJECTS
        #

        xq = self.container.ex_manager.create_xn_queue('random_queue')
        self.addCleanup(xq.delete)

        # recv'd messages from the subscriber
        self.recv_queue = Queue()

        sub = Subscriber(from_name=xq, callback=lambda m,h: self.recv_queue.put((m, h)))
        sub.prepare_listener()

        # publish 10 messages - we're not bound yet, so they'll just disappear
        for x in xrange(10):
            pub3.publish("3,%s" % str(x))

        # no messages yet
        self.assertFalse(sub.get_one_msg(timeout=0))

        # now, we'll bind the xq
        xq.bind('routed.3')

        # even though we are consuming, there are no messages - the previously published ones all disappeared
        self.assertFalse(sub.get_one_msg(timeout=0))

        # publish those messages again
        for x in xrange(10):
            pub3.publish("3,%s" % str(x))

        # NOW we have messages!
        for x in xrange(10):
            self.assertTrue(sub.get_one_msg(timeout=0))
            m,h = self.recv_queue.get(timeout=0)
            self.assertEquals(m, "3,%s" % str(x))

        # we've cleared it all
        self.assertFalse(sub.get_one_msg(timeout=0))

        # bind a wildcard and publish on both
        xq.bind('routed.*')

        for x in xrange(10):
            time.sleep(0.3)
            pub3.publish("3,%s" % str(x))
            time.sleep(0.3)
            pub5.publish("5,%s" % str(x))

        # should get all 20, interleaved
        for x in xrange(10):
            self.assertTrue(sub.get_one_msg(timeout=0))
            m, h = self.recv_queue.get(timeout=0)
            self.assertEquals(m, "3,%s" % str(x))

            self.assertTrue(sub.get_one_msg(timeout=0))
            m, h = self.recv_queue.get(timeout=0)
            self.assertEquals(m, "5,%s" % str(x))

        # add the 'routed.5' binding, remove all other bindings
        xq.bind('routed.5')
        xq.unbind('routed.3')
        xq.unbind('routed.*')

        # try publishing to 3, shouldn't arrive anymore
        pub3.publish("3")

        self.assertFalse(sub.get_one_msg(timeout=0))

        # let's turn off the consumer and let things build up a bit
        sub._chan.stop_consume()

        for x in xrange(10):
            pub5.publish("5,%s" % str(x))

        # 10 messages in the queue, no consumers
        self.assertTupleEqual((10, 0), sub._chan.get_stats())

        # drain queue
        sub._chan.start_consume()
        time.sleep(1)       # yield to allow delivery

        for x in xrange(10):
            self.assertTrue(sub.get_one_msg(timeout=0))
            self.recv_queue.get(timeout=0)

        sub.close()
Code example #7
    def test_multiple_visualization_queue(self):

        # Set up a workflow with the salinity transform and the doubler. Direct the original
        # stream and the doubled stream to queues, and test that the subscription to the
        # queues works correctly.
        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Viz_Test_Workflow',
                                     description='A workflow to test collection of multiple data products in queues')

        workflow_data_product_name = 'TEST-Workflow_Output_Product' #Set a specific output product name
        #-------------------------------------------------------------------------------------------------------------------------
        #Add a transformation process definition for salinity
        #-------------------------------------------------------------------------------------------------------------------------

        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep',
                                      data_process_definition_id=ctd_L2_salinity_dprocdef_id,
                                      persist_process_output_data=False)  # don't persist the intermediate data product
        configuration = {'stream_name' : 'salinity'}
        workflow_step_obj.configuration = configuration
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        aids = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aids) == 1)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=30)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        # Now, for each of the data_product_stream_ids, create a queue and pipe its data into it

        user_queue_name1 = USER_VISUALIZATION_QUEUE + '1'
        user_queue_name2 = USER_VISUALIZATION_QUEUE + '2'

        # use idempotency to create queues
        xq1 = self.container.ex_manager.create_xn_queue(user_queue_name1)
        self.addCleanup(xq1.delete)
        xq2 = self.container.ex_manager.create_xn_queue(user_queue_name2)
        self.addCleanup(xq2.delete)
        xq1.purge()
        xq2.purge()

        # the create_subscription call takes a list of stream_ids, so create temp ones
        dp_stream_id1 = [data_product_stream_ids[0]]
        dp_stream_id2 = [data_product_stream_ids[1]]

        salinity_subscription_id1 = self.pubsubclient.create_subscription(
            stream_ids=dp_stream_id1, exchange_name=user_queue_name1,
            name="user visualization queue1")

        salinity_subscription_id2 = self.pubsubclient.create_subscription(
            stream_ids=dp_stream_id2, exchange_name=user_queue_name2,
            name="user visualization queue2")

        # Create subscribers for the output of the queue
        subscriber1 = Subscriber(from_name=xq1)
        subscriber1.initialize()
        subscriber2 = Subscriber(from_name=xq2)
        subscriber2.initialize()

        # after the queue has been created it is safe to activate the subscription
        self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id1)
        self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id2)

        # Start input stream and wait for some time
        ctd_sim_pid = self.start_simple_input_stream_process(ctd_stream_id)
        gevent.sleep(5.0)  # Send some messages - don't care how many

        msg_count, _ = xq1.get_stats()
        log.info('Messages in user queue 1: %s', msg_count)
        msg_count, _ = xq2.get_stats()
        log.info('Messages in user queue 2: %s', msg_count)

        msgs1 = subscriber1.get_all_msgs(timeout=2)
        msgs2 = subscriber2.get_all_msgs(timeout=2)

        for m1, m2 in zip(msgs1, msgs2):
            m1.ack()
            m2.ack()
            self.validate_multiple_vis_queue_messages(m1.body, m2.body)

        # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid)

        # close the subscription and queues
        subscriber1.close()
        subscriber2.close()

        return