def get_realtime_visualization_data(self, query_token=''):
    """Return a block of visualization data for real-time display of a data product.

    Requires a user-specific token obtained from a previous call to the
    init_realtime_visualization operation.

    @param query_token str  token naming the user's visualization queue
    @retval datatable str
    @throws BadRequest if query_token is missing
    @throws NotFound Throws if specified query_token or its visualization product does not exist
    """
    log.debug("get_realtime_visualization_data Vis worker: %s", self.id)

    if not query_token:
        raise BadRequest("The query_token parameter is missing")

    # Taking advantage of idempotency: create_xn_queue returns the queue
    # already declared for this token rather than failing.
    xq = self.container.ex_manager.create_xn_queue(query_token)
    subscriber = Subscriber(from_name=xq)
    subscriber.initialize()
    try:
        # Drain everything currently queued; ack so the broker does not redeliver.
        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()
    finally:
        # Fix: the original never closed the subscriber, leaking a consumer
        # channel on every call (siblings of this method do close it).
        subscriber.close()

    # Different messages should get processed differently. Ret val will be
    # decided by the viz product type.
    # TODO - replace as need be to return valid GDT data
    return self._process_visualization_message(msgs)
def get_realtime_visualization_data(self, query_token='', callback='', tqx=""):
    """Return a block of visualization data for real-time display of a data product.

    Requires a user-specific token obtained from a previous call to the
    init_realtime_visualization operation.

    @param query_token str  token naming the user's visualization queue
    @param callback str  optional callback name forwarded to message processing
    @param tqx str  optional Google-Visualization-style "key:value;key:value" string
    @retval datatable str
    @throws BadRequest if query_token is missing
    @throws NotFound Throws if specified query_token or its visualization product does not exist
    """
    # Fix: replaced stdout `print` debugging with lazy %-style log.debug,
    # consistent with the other variants of this operation.
    log.debug("get_realtime_visualization_data query_token=%s callback=%s tqx=%s",
              query_token, callback, tqx)

    if not query_token:
        raise BadRequest("The query_token parameter is missing")

    # If a reqId was passed in tqx, extract it.
    reqId = 0
    if tqx:
        for param in tqx.split(";"):
            # partition() tolerates params without (or with extra) ':',
            # where the original split(":") raised ValueError.
            key, _, value = param.partition(":")
            if key == 'reqId':
                reqId = value

    # Taking advantage of idempotency: create_xn_queue returns the queue
    # already declared for this token rather than failing.
    xq = self.container.ex_manager.create_xn_queue(query_token)
    subscriber = Subscriber(from_name=xq)
    subscriber.initialize()
    try:
        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()
    finally:
        # Fix: the original never closed the subscriber (only a commented-out
        # finally hinted at it), leaking a consumer channel per call.
        subscriber.close()

    # Different messages should get processed differently. Ret val will be
    # decided by the viz product type.
    # TODO - replace as need be to return valid GDT data
    return self._process_visualization_message(msgs, callback, reqId)
def get_realtime_visualization_data(self, query_token='', callback='', tqx=""):
    """Return a block of visualization data for real-time display of a data product.

    Requires a user-specific token obtained from a previous call to the
    init_realtime_visualization operation.

    @param query_token str  token naming the user's visualization queue
    @param callback str  optional callback name forwarded to message processing
    @param tqx str  optional Google-Visualization-style "key:value;key:value" string
    @retval datatable str
    @throws BadRequest if query_token is missing
    @throws NotFound Throws if specified query_token or its visualization product does not exist
    """
    # Fix: lazy %-style args instead of eager '+' concatenation (which also
    # lacked a space before "TQX"); the message is only built if DEBUG is on.
    log.debug("Query token : %s CB : %s TQX : %s", query_token, callback, tqx)

    if not query_token:
        raise BadRequest("The query_token parameter is missing")

    # If a reqId was passed in tqx, extract it.
    reqId = 0
    if tqx:
        for param in tqx.split(";"):
            # partition() tolerates params without (or with extra) ':',
            # where the original split(":") raised ValueError.
            key, _, value = param.partition(":")
            if key == 'reqId':
                reqId = value

    # Taking advantage of idempotency: create_xn_queue returns the queue
    # already declared for this token rather than failing.
    xq = self.container.ex_manager.create_xn_queue(query_token)
    subscriber = Subscriber(from_name=xq)
    subscriber.initialize()
    try:
        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()
    finally:
        # Fix: the original never closed the subscriber (only a commented-out
        # finally hinted at it), leaking a consumer channel per call.
        subscriber.close()

    # Different messages should get processed differently. Ret val will be
    # decided by the viz product type.
    # TODO - replace as need be to return valid GDT data
    return self._process_visualization_message(msgs, callback, reqId)
def get_realtime_visualization_data(self, query_token=''):
    """Return a block of visualization data for real-time display of a data product.

    Requires a user-specific token obtained from a previous call to the
    init_realtime_visualization operation.

    @param query_token str  token combined with USER_VISUALIZATION_QUEUE to name the queue
    @retval datatable str
    @throws BadRequest if query_token is missing or the queue cannot be subscribed
    @throws NotFound Throws if specified query_token or its visualization product does not exist
    """
    log.debug("get_realtime_visualization_data Vis worker: %s", self.id)

    if not query_token:
        raise BadRequest("The query_token parameter is missing")

    # Fix: bind the name before the try block. In the original, if
    # create_xn_queue raised, the except handler's `if subscriber:` hit an
    # unbound local and a NameError masked the real failure.
    subscriber = None
    try:
        # Taking advantage of idempotency
        queue_name = '-'.join([USER_VISUALIZATION_QUEUE, query_token])
        xq = self.container.ex_manager.create_xn_queue(queue_name)
        subscriber = Subscriber(from_name=xq)
        subscriber.initialize()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed. Close the subscriber if it was created.
        if subscriber:
            subscriber.close()
        raise BadRequest("Could not subscribe to the real-time queue")

    # Drain everything currently queued; ack so the broker does not redeliver.
    msgs = subscriber.get_all_msgs(timeout=2)
    for msg in msgs:
        msg.ack()
    subscriber.close()

    # Different messages should get processed differently. Ret val will be
    # decided by the viz product type.
    return self._process_visualization_message(msgs)
def get_realtime_visualization_data(self, query_token=''):
    """Return a block of visualization data for real-time display of a data product.

    Requires a user-specific token obtained from a previous call to the
    init_realtime_visualization operation.

    @param query_token str  token combined with USER_VISUALIZATION_QUEUE to name the queue
    @retval datatable str
    @throws BadRequest if query_token is missing or the queue cannot be subscribed
    @throws NotFound Throws if specified query_token or its visualization product does not exist
    """
    log.debug("get_realtime_visualization_data Vis worker: %s", self.id)

    if not query_token:
        raise BadRequest("The query_token parameter is missing")

    # Fix: bind the name before the try block. In the original, if
    # create_xn_queue raised, the except handler's `if subscriber:` hit an
    # unbound local and a NameError masked the real failure.
    subscriber = None
    try:
        # Taking advantage of idempotency
        queue_name = '-'.join([USER_VISUALIZATION_QUEUE, query_token])
        xq = self.container.ex_manager.create_xn_queue(queue_name)
        subscriber = Subscriber(from_name=xq)
        subscriber.initialize()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed. Close the subscriber if it was created.
        if subscriber:
            subscriber.close()
        raise BadRequest("Could not subscribe to the real-time queue")

    # Drain everything currently queued; ack so the broker does not redeliver.
    msgs = subscriber.get_all_msgs(timeout=2)
    for msg in msgs:
        msg.ack()
    subscriber.close()

    # Different messages should get processed differently. Ret val will be
    # decided by the viz product type.
    return self._process_visualization_message(msgs)
def get_realtime_visualization_data(self, query_token=''):
    """Return a block of visualization data for real-time display of a data product.

    Requires a user-specific token obtained from a previous call to the
    init_realtime_visualization operation.

    @param query_token str  token naming the user's visualization queue
    @retval datatable str
    @throws BadRequest if query_token is missing
    @throws NotFound Throws if specified query_token or its visualization product does not exist
    """
    if not query_token:
        raise BadRequest("The query_token parameter is missing")

    # Taking advantage of idempotency: create_xn_queue returns the queue
    # already declared for this token rather than failing.
    xq = self.container.ex_manager.create_xn_queue(query_token)
    subscriber = Subscriber(from_name=xq)
    subscriber.initialize()
    try:
        msg_count, _ = subscriber.get_stats()
        log.info('Messages in user queue 1: %s ' % msg_count)

        # Drain everything currently queued; ack so the broker does not redeliver.
        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()

        # Different messages should get processed differently. Ret val will be
        # decided by the viz product type.
        ret_val = self._process_visualization_message(msgs)

        msg_count, _ = subscriber.get_stats()
        log.info('Messages in user queue 2: %s ' % msg_count)
    finally:
        # Fix: close the subscriber so the consumer channel is not leaked.
        subscriber.close()

    # Fix: the original removed the useless `except Exception, e: raise e`
    # wrapper (which truncated the traceback) and never returned ret_val,
    # so callers always got None; every sibling variant returns it.
    return ret_val
def test_visualization_queue(self):
    """Integration test: pipe a CTD input stream into a user visualization
    queue and verify messages can be drained and validated, both while the
    simulator is running and after it is cancelled.
    """

    #The list of data product streams to monitor
    data_product_stream_ids = list()

    #Create the input data product
    ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
    data_product_stream_ids.append(ctd_stream_id)

    user_queue_name = USER_VISUALIZATION_QUEUE

    # Declare the queue before subscribing so no messages are lost.
    xq = self.container.ex_manager.create_xn_queue(user_queue_name)

    salinity_subscription_id = self.pubsubclient.create_subscription(
        stream_ids=data_product_stream_ids,
        exchange_name = user_queue_name,
        name = "user visualization queue"
    )

    subscriber = Subscriber(from_name=xq)
    subscriber.initialize()

    # after the queue has been created it is safe to activate the subscription
    self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id)

    #Start the output stream listener to monitor and collect messages
    #results = self.start_output_stream_and_listen(None, data_product_stream_ids)

    #Not sure why this is needed - but it is
    #subscriber._chan.stop_consume()

    # Let the simulator publish for a while; we don't care exactly how many
    # messages accumulate.
    ctd_sim_pid = self.start_simple_input_stream_process(ctd_stream_id)
    gevent.sleep(10.0)  # Send some messages - don't care how many

    msg_count,_ = xq.get_stats()
    log.info('Messages in user queue 1: %s ' % msg_count)

    #Validate the data from each of the messages along the way
    #self.validate_messages(results)

    # for x in range(msg_count):
    #     mo = subscriber.get_one_msg(timeout=1)
    #     print mo.body
    #     mo.ack()

    # Drain the queue, acking and validating each message individually.
    msgs = subscriber.get_all_msgs(timeout=2)
    for x in range(len(msgs)):
        msgs[x].ack()
        self.validate_messages(msgs[x])
        # print msgs[x].body

    #Should be zero after pulling all of the messages.
    msg_count,_ = xq.get_stats()
    log.info('Messages in user queue 2: %s ' % msg_count)

    #Trying to continue to receive messages in the queue
    gevent.sleep(5.0)  # Send some messages - don't care how many

    #Turning off after everything - since it is more representative of an always on stream of data!
    self.process_dispatcher.cancel_process(ctd_sim_pid) # kill the ctd simulator process - that is enough data

    #Should see more messages in the queue
    msg_count,_ = xq.get_stats()
    log.info('Messages in user queue 3: %s ' % msg_count)

    # Second drain: pick up what arrived between the two sleeps.
    msgs = subscriber.get_all_msgs(timeout=2)
    for x in range(len(msgs)):
        msgs[x].ack()
        self.validate_messages(msgs[x])

    #Should be zero after pulling all of the messages.
    msg_count,_ = xq.get_stats()
    log.info('Messages in user queue 4: %s ' % msg_count)

    # Clean up the consumer channel and the broker queue.
    subscriber.close()
    self.container.ex_manager.delete_xn(xq)
def test_consume_one_message_at_a_time(self):
    """Exercise queue bind/unbind and one-at-a-time consumption semantics
    of an exchange-object queue, including wildcard bindings and a
    stopped-consumer backlog. Statement order and the routing sleeps are
    load-bearing: messages are asserted in publish order.
    """
    # see also pyon.net.test.test_channel:TestChannelInt.test_consume_one_message_at_a_time

    pub3 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.3'))
    pub5 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.5'))

    #
    # SETUP COMPLETE, BEGIN TESTING OF EXCHANGE OBJECTS
    #

    xq = self.container.ex_manager.create_xn_queue('random_queue')
    self.addCleanup(xq.delete)

    # recv'd messages from the subscriber
    self.recv_queue = Queue()

    # Callback must never fire: this test pulls messages explicitly with
    # get_one_msg rather than via the subscriber's consume callback.
    def cb(m, h):
        raise StandardError("Subscriber callback never gets called back!")

    sub = Subscriber(from_name=xq, callback=cb)
    sub.initialize()

    # publish 10 messages - we're not bound yet, so they'll just dissapear
    for x in xrange(10):
        pub3.publish("3,%s" % str(x))

    # allow time for routing
    time.sleep(2)

    # no messages yet
    self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

    # now, we'll bind the xq
    xq.bind('routed.3')

    # even tho we are consuming, there are no messages - the previously published ones all dissapeared
    self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

    # publish those messages again
    for x in xrange(10):
        pub3.publish("3,%s" % str(x))

    # allow time for routing
    time.sleep(2)

    # NOW we have messages!
    for x in xrange(10):
        mo = sub.get_one_msg(timeout=10)
        self.assertEquals(mo.body, "3,%s" % str(x))
        mo.ack()

    # we've cleared it all
    self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

    # bind a wildcard and publish on both
    xq.bind('routed.*')

    # Alternate publishes with small sleeps so arrival order is deterministic.
    for x in xrange(10):
        time.sleep(0.3)
        pub3.publish("3,%s" % str(x))
        time.sleep(0.3)
        pub5.publish("5,%s" % str(x))

    # allow time for routing
    time.sleep(2)

    # should get all 20, interleaved
    for x in xrange(10):
        mo = sub.get_one_msg(timeout=1)
        self.assertEquals(mo.body, "3,%s" % str(x))
        mo.ack()

        mo = sub.get_one_msg(timeout=1)
        self.assertEquals(mo.body, "5,%s" % str(x))
        mo.ack()

    # add 5 binding, remove all other bindings
    xq.bind('routed.5')
    xq.unbind('routed.3')
    xq.unbind('routed.*')

    # try publishing to 3, shouldn't arrive anymore
    pub3.publish("3")
    self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

    # let's turn off the consumer and let things build up a bit
    sub._chan.stop_consume()
    for x in xrange(10):
        pub5.publish("5,%s" % str(x))

    # allow time for routing
    time.sleep(2)

    # 10 messages in the queue, no consumers
    self.assertTupleEqual((10, 0), sub._chan.get_stats())

    # drain queue
    sub._chan.start_consume()
    for x in xrange(10):
        mo = sub.get_one_msg(timeout=1)
        mo.ack()

    sub.close()
def test_multiple_visualization_queue(self):
    """Integration test: run a salinity workflow, route the input stream and
    the workflow output stream to two separate user queues, and verify
    paired messages can be drained and cross-validated from both.
    """
    # set up a workflow with the salinity transform and the doubler. We will direct the original stream and the doubled stream to queues
    # and test to make sure the subscription to the queues is working correctly

    assertions = self.assertTrue

    # Build the workflow definition
    workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Viz_Test_Workflow',description='A workflow to test collection of multiple data products in queues')

    workflow_data_product_name = 'TEST-Workflow_Output_Product'  #Set a specific output product name

    #-------------------------------------------------------------------------------------------------------------------------
    #Add a transformation process definition for salinity
    #-------------------------------------------------------------------------------------------------------------------------

    ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()
    workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=ctd_L2_salinity_dprocdef_id, persist_process_output_data=False)  #Don't persist the intermediate data product
    configuration = {'stream_name' : 'salinity'}
    workflow_step_obj.configuration = configuration
    workflow_def_obj.workflow_steps.append(workflow_step_obj)

    #Create it in the resource registry
    workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

    aids = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
    assertions(len(aids) == 1 )

    #The list of data product streams to monitor
    data_product_stream_ids = list()

    #Create the input data product
    ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
    data_product_stream_ids.append(ctd_stream_id)

    #Create and start the workflow
    workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=30)

    workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
    assertions(len(workflow_output_ids) == 1 )

    #Walk the associations to find the appropriate output data streams to validate the messages
    workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
    assertions(len(workflow_dp_ids) == 1 )

    for dp_id in workflow_dp_ids:
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) == 1 )
        data_product_stream_ids.append(stream_ids[0])

    # Now for each of the data_product_stream_ids create a queue and pipe their data to the queue
    user_queue_name1 = USER_VISUALIZATION_QUEUE + '1'
    user_queue_name2 = USER_VISUALIZATION_QUEUE + '2'

    # use idempotency to create queues
    xq1 = self.container.ex_manager.create_xn_queue(user_queue_name1)
    self.addCleanup(xq1.delete)
    xq2 = self.container.ex_manager.create_xn_queue(user_queue_name2)
    self.addCleanup(xq2.delete)

    # Purge any leftovers from earlier runs of these idempotent queues.
    xq1.purge()
    xq2.purge()

    # the create_subscription call takes a list of stream_ids so create temp ones
    dp_stream_id1 = list()
    dp_stream_id1.append(data_product_stream_ids[0])
    dp_stream_id2 = list()
    dp_stream_id2.append(data_product_stream_ids[1])

    salinity_subscription_id1 = self.pubsubclient.create_subscription( stream_ids=dp_stream_id1, exchange_name = user_queue_name1, name = "user visualization queue1")
    salinity_subscription_id2 = self.pubsubclient.create_subscription( stream_ids=dp_stream_id2, exchange_name = user_queue_name2, name = "user visualization queue2")

    # Create subscribers for the output of the queue
    subscriber1 = Subscriber(from_name=xq1)
    subscriber1.initialize()
    subscriber2 = Subscriber(from_name=xq2)
    subscriber2.initialize()

    # after the queue has been created it is safe to activate the subscription
    self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id1)
    self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id2)

    # Start input stream and wait for some time
    ctd_sim_pid = self.start_simple_input_stream_process(ctd_stream_id)
    gevent.sleep(5.0)  # Send some messages - don't care how many

    msg_count,_ = xq1.get_stats()
    log.info('Messages in user queue 1: %s ' % msg_count)
    msg_count,_ = xq2.get_stats()
    log.info('Messages in user queue 2: %s ' % msg_count)

    msgs1 = subscriber1.get_all_msgs(timeout=2)
    msgs2 = subscriber2.get_all_msgs(timeout=2)

    # Validate in pairs; min() guards against the two queues having drained
    # different numbers of messages.
    for x in range(min(len(msgs1), len(msgs2))):
        msgs1[x].ack()
        msgs2[x].ack()
        self.validate_multiple_vis_queue_messages(msgs1[x].body, msgs2[x].body)

    # kill the ctd simulator process - that is enough data
    self.process_dispatcher.cancel_process(ctd_sim_pid)

    # close the subscription and queues
    subscriber1.close()
    subscriber2.close()

    return
def test_consume_one_message_at_a_time(self):
    """Exercise queue bind/unbind and one-at-a-time consumption semantics
    of an exchange-object queue.

    NOTE(review): this duplicates the name of the earlier
    test_consume_one_message_at_a_time in this file, so whichever is
    defined later in the class shadows the other. This version also lacks
    the "allow time for routing" sleeps the other copy has, which makes it
    prone to racing the broker — confirm which revision is intended.
    """
    # see also pyon.net.test.test_channel:TestChannelInt.test_consume_one_message_at_a_time

    pub3 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.3'))
    pub5 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.5'))

    #
    # SETUP COMPLETE, BEGIN TESTING OF EXCHANGE OBJECTS
    #

    xq = self.container.ex_manager.create_xn_queue('random_queue')
    self.addCleanup(xq.delete)

    # recv'd messages from the subscriber
    self.recv_queue = Queue()

    # Callback must never fire: this test pulls messages explicitly with
    # get_one_msg rather than via the subscriber's consume callback.
    def cb(m, h):
        raise StandardError("Subscriber callback never gets called back!")

    sub = Subscriber(from_name=xq, callback=cb)
    sub.initialize()

    # publish 10 messages - we're not bound yet, so they'll just dissapear
    for x in xrange(10):
        pub3.publish("3,%s" % str(x))

    # no messages yet
    self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

    # now, we'll bind the xq
    xq.bind('routed.3')

    # even tho we are consuming, there are no messages - the previously published ones all dissapeared
    self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

    # publish those messages again
    for x in xrange(10):
        pub3.publish("3,%s" % str(x))

    # NOW we have messages!
    for x in xrange(10):
        mo = sub.get_one_msg(timeout=1)
        self.assertEquals(mo.body, "3,%s" % str(x))
        mo.ack()

    # we've cleared it all
    self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

    # bind a wildcard and publish on both
    xq.bind('routed.*')

    # Alternate publishes with small sleeps so arrival order is deterministic.
    for x in xrange(10):
        time.sleep(0.3)
        pub3.publish("3,%s" % str(x))
        time.sleep(0.3)
        pub5.publish("5,%s" % str(x))

    # should get all 20, interleaved
    for x in xrange(10):
        mo = sub.get_one_msg(timeout=1)
        self.assertEquals(mo.body, "3,%s" % str(x))
        mo.ack()

        mo = sub.get_one_msg(timeout=1)
        self.assertEquals(mo.body, "5,%s" % str(x))
        mo.ack()

    # add 5 binding, remove all other bindings
    xq.bind('routed.5')
    xq.unbind('routed.3')
    xq.unbind('routed.*')

    # try publishing to 3, shouldn't arrive anymore
    pub3.publish("3")
    self.assertRaises(Timeout, sub.get_one_msg, timeout=0)

    # let's turn off the consumer and let things build up a bit
    sub._chan.stop_consume()
    for x in xrange(10):
        pub5.publish("5,%s" % str(x))

    # 10 messages in the queue, no consumers
    self.assertTupleEqual((10, 0), sub._chan.get_stats())

    # drain queue
    sub._chan.start_consume()
    for x in xrange(10):
        mo = sub.get_one_msg(timeout=1)
        mo.ack()

    sub.close()