    @defer.inlineCallbacks
    def test_sample(self):
        result = yield self.driver_client.initialize('some arg')

        dpsc = DataPubsubClient(self.sup)
        topicname = 'SBE49 Topic'
        topic = PubSubTopicResource.create(topicname,"")

        # Use the service to create a queue and register the topic
        topic = yield dpsc.define_topic(topic)

        subscription = SubscriptionResource()
        subscription.topic1 = PubSubTopicResource.create(topicname,'')

        subscription.workflow = {
            'consumer1':
                {'module':'ion.services.dm.distribution.consumers.logging_consumer',
                 'consumerclass':'LoggingConsumer',\
                 'attach':'topic1'}
                }

        subscription = yield dpsc.define_subscription(subscription)

        logging.info('Defined subscription: '+str(subscription))

        params = {}
        params['publish-to'] = topic.RegistryIdentity
        yield self.driver_client.configure_driver(params)

        cmd1 = [['ds', 'now']]
        result = yield self.driver_client.execute(cmd1)
        self.assertEqual(result['status'], 'OK')

        yield pu.asleep(1)

        result = yield self.driver_client.disconnect(['some arg'])

    @defer.inlineCallbacks
    def test_create_topic(self):
        #dpsc = DataPubsubClient(self.pubsubSuper)

        dpsc = DataPubsubClient(self.sup)
        # Create and Register a topic
        """
        DHE: not sure the driver should be creating the topic; for right
        now I'll have the test case do it.
        """
        self.topic = PubSubTopicResource.create('SBE49 Topic',"oceans, oil spill")
        self.topic = yield dpsc.define_topic(self.topic)


        print 'TADA!'

    @defer.inlineCallbacks
    def test_pubsub(self):

        dpsc = DataPubsubClient(self.sup)
        
        # Create and Register a topic
        topic = PubSubTopicResource.create('Davids Topic',"oceans, oil spill, fun things to do")        
        topic = yield dpsc.define_topic(topic)
        logging.info('Defined Topic: '+str(topic))

        #Create and register self.sup as a publisher
        publisher = PublisherResource.create('Test Publisher', self.sup, topic, 'DataObject')
        publisher = yield dpsc.define_publisher(publisher)

        logging.info('Defined Publisher: '+str(publisher))
        

        
        # === Create a Consumer and queues - this will become part of define_subscription.
        
        #Create two test queues - don't use topics to test the consumer
        # To be replaced when the subscription service is ready
        queue1=dataobject.create_unique_identity()
        queue_properties = {queue1:{'name_type':'fanout', 'args':{'scope':'global'}}}
        yield bootstrap.declare_messaging(queue_properties)

        queue2=dataobject.create_unique_identity()
        queue_properties = {queue2:{'name_type':'fanout', 'args':{'scope':'global'}}}
        yield bootstrap.declare_messaging(queue_properties)

        pd1={'name':'example_consumer_1',
                 'module':'ion.services.dm.distribution.consumers.forwarding_consumer',
                 'procclass':'ForwardingConsumer',
                 'spawnargs':{'attach':topic.queue.name,\
                              'process parameters':{},\
                              'delivery queues':{'queues':[queue1,queue2]}}\
                    }
        child1 = base_consumer.ConsumerDesc(**pd1)

        child1_id = yield self.test_sup.spawn_child(child1)

        # === End of what will be replaced with Define_Consumer


        # Create and send a data message
        data = {'Data':'in a dictionary'}
        result = yield dpsc.publish(self.sup, topic.reference(), data)
        if result:
            logging.info('Published Message')
        else:
            logging.info('Failed to Publish Message')

        # Need to await the delivery of data messages into the (separate) consumers
        yield pu.asleep(1)

        msg_cnt = yield child1.get_msg_count()
        received = msg_cnt.get('received',{})
        sent = msg_cnt.get('sent',{})
        self.assertEqual(sent.get(queue1),1)
        self.assertEqual(sent.get(queue2),1)
        self.assertEqual(received.get(topic.queue.name),1)


        # === Create a Consumer - this will become part of define_subscription.
        
        pd2={'name':'example_consumer_2',
                 'module':'ion.services.dm.distribution.consumers.logging_consumer',
                 'procclass':'LoggingConsumer',
                 'spawnargs':{'attach':queue1,\
                              'process parameters':{},\
                              'delivery queues':{}}\
                    }
        child2 = base_consumer.ConsumerDesc(**pd2)

        child2_id = yield self.test_sup.spawn_child(child2)

        # === End of what will become part of the subscription definition

        # Send the simple message again
        result = yield dpsc.publish(self.sup, topic.reference(), data)
        
        # Need to await the delivery of data messages into the (separate) consumers
        yield pu.asleep(1)

        msg_cnt = yield child1.get_msg_count()

        received = msg_cnt.get('received',{})
        sent = msg_cnt.get('sent',{})
        self.assertEqual(sent.get(queue1),2)
        self.assertEqual(sent.get(queue2),2)
        self.assertEqual(received.get(topic.queue.name),2)
        
        msg_cnt = yield child2.get_msg_count()
        received = msg_cnt.get('received',{})
        sent = msg_cnt.get('sent',{})
        self.assertEqual(sent,{})
        self.assertEqual(received.get(queue1),1)

    @defer.inlineCallbacks
    def test_exampleconsumer(self):
        '''
        @brief The Example Consumer is a demonstration of a more complex data
        consumer. It uses DAP data messages and provides QAQC and event results
        on two separate queues.
        '''
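        # Rough data flow exercised by this test (it mirrors the spawnargs set
        # up below): the publisher sends DAP messages to topic_raw, the
        # ExampleConsumer attached to that topic forwards its QAQC/event output
        # to two plain fanout queues, and a LoggingConsumer drains each queue.
        #
        #   publisher -> topic_raw -> ExampleConsumer -> evt_queue -> LoggingConsumer (child2)
        #                                             -> pr_queue  -> LoggingConsumer (child3)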
        dpsc = DataPubsubClient(self.sup)
        
        #Create and register 3 topics!
        topic_raw = PubSubTopicResource.create("topic_raw","oceans, oil spill, fun things to do") 
        topic_raw = yield dpsc.define_topic(topic_raw)


        #Create and register self.sup as a publisher
        publisher = PublisherResource.create('Test Publisher', self.sup, topic_raw, 'DataObject')
        publisher = yield dpsc.define_publisher(publisher)

        logging.info('Defined Publisher: '+str(publisher))

        # === Create a Consumer and queues - this will become part of define_subscription.
        
        #Create two test queues - don't use topics to test the consumer
        # To be replaced when the subscription service is ready
        evt_queue=dataobject.create_unique_identity()
        queue_properties = {evt_queue:{'name_type':'fanout', 'args':{'scope':'global'}}}
        yield bootstrap.declare_messaging(queue_properties)

        pr_queue=dataobject.create_unique_identity()
        queue_properties = {pr_queue:{'name_type':'fanout', 'args':{'scope':'global'}}}
        yield bootstrap.declare_messaging(queue_properties)

        pd1={'name':'example_consumer_1',
                 'module':'ion.services.dm.distribution.consumers.example_consumer',
                 'procclass':'ExampleConsumer',
                 'spawnargs':{'attach':topic_raw.queue.name,\
                              'Process Parameters':{},\
                              'delivery queues':\
                              {'event_queue':evt_queue,\
                               'processed_queue':pr_queue}}\
                    }

        child1 = base_consumer.ConsumerDesc(**pd1)

        child1_id = yield self.test_sup.spawn_child(child1)


        pd2={'name':'example_consumer_2',
                 'module':'ion.services.dm.distribution.consumers.logging_consumer',
                 'procclass':'LoggingConsumer',
                 'spawnargs':{'attach':evt_queue,\
                              'Process Parameters':{}}\
                    }
        child2 = base_consumer.ConsumerDesc(**pd2)

        child2_id = yield self.test_sup.spawn_child(child2)

        pd3={'name':'example_consumer_3',
                 'module':'ion.services.dm.distribution.consumers.logging_consumer',
                 'procclass':'LoggingConsumer',
                 'spawnargs':{'attach':pr_queue,\
                              'Process Parameters':{}}\
                    }
        child3 = base_consumer.ConsumerDesc(**pd3)

        child3_id = yield self.test_sup.spawn_child(child3)

        # === End of stuff that will be replaced with Subscription method...


        # Create an example data message
        dmsg = dap_tools.simple_datamessage(\
            {'DataSet Name':'Simple Data','variables':\
                {'time':{'long_name':'Date and Time','units':'seconds'},\
                'height':{'long_name':'person height','units':'meters'}}}, \
            {'time':(101,102,103,104,105,106,107,108,109,110), \
            'height':(5,2,4,5,-1,9,3,888,3,4)})
        
        result = yield dpsc.publish(self.sup, topic_raw.reference(), dmsg)
        if result:
            logging.info('Published Message')
        else:
            logging.info('Failed to Publish Message')


        # Need to await the delivery of data messages into the consumers
        yield pu.asleep(1)

        msg_cnt = yield child1.get_msg_count()
        received = msg_cnt.get('received',{})
        sent = msg_cnt.get('sent',{})
        self.assertEqual(sent.get(evt_queue),2)
        self.assertEqual(sent.get(pr_queue),1)
        self.assertEqual(received.get(topic_raw.queue.name),1)
        
        msg_cnt = yield child2.get_msg_count()
        received = msg_cnt.get('received',{})
        sent = msg_cnt.get('sent',{})
        self.assertEqual(sent,{})
        self.assertEqual(received.get(evt_queue),2)
        
        msg_cnt = yield child3.get_msg_count()
        received = msg_cnt.get('received',{})
        sent = msg_cnt.get('sent',{})
        self.assertEqual(sent,{})
        self.assertEqual(received.get(pr_queue),1)
        

        # Publish a second message with different data
        dmsg = dap_tools.simple_datamessage(\
            {'DataSet Name':'Simple Data','variables':\
                {'time':{'long_name':'Date and Time','units':'seconds'},\
                'height':{'long_name':'person height','units':'meters'}}}, \
            {'time':(111,112,123,114,115,116,117,118,119,120), \
            'height':(8,986,4,-2,-1,5,3,1,4,5)})
        
        result = yield dpsc.publish(self.sup, topic_raw.reference(), dmsg)

        # Need to await the delivery of data messages into the consumers
        yield pu.asleep(1)

        msg_cnt = yield child1.get_msg_count()
        received = msg_cnt.get('received',{})
        sent = msg_cnt.get('sent',{})
        self.assertEqual(sent.get(evt_queue),5)
        self.assertEqual(sent.get(pr_queue),2)
        self.assertEqual(received.get(topic_raw.queue.name),2)
        
        msg_cnt = yield child2.get_msg_count()
        received = msg_cnt.get('received',{})
        sent = msg_cnt.get('sent',{})
        self.assertEqual(sent,{})
        self.assertEqual(received.get(evt_queue),5)
        
        msg_cnt = yield child3.get_msg_count()
        received = msg_cnt.get('received',{})
        sent = msg_cnt.get('sent',{})
        self.assertEqual(sent,{})
        self.assertEqual(received.get(pr_queue),2)


class InstrumentManagementService(BaseService):
    """
    Instrument management service interface.
    This service provides overall coordination for instrument management within
    an observatory context. In particular, it coordinates access to the
    instrument and data product registries and the interaction with
    instrument agents.
    """

    # Declaration of service
    declare = BaseService.service_declare(name='instrument_management',
                                          version='0.1.0',
                                          dependencies=[])

    def slc_init(self):
        self.irc = InstrumentRegistryClient(proc=self)
        self.dprc = DataProductRegistryClient(proc=self)
        self.arc = AgentRegistryClient(proc=self)
        self.dpsc = DataPubsubClient(proc=self)

    @defer.inlineCallbacks
    def op_create_new_instrument(self, content, headers, msg):
        """
        Service operation: Accepts a dictionary containing user inputs.
        Updates the instrument registry.
        """
        userInput = content['userInput']

        newinstrument = InstrumentResource.create_new_resource()

        if 'name' in userInput:
            newinstrument.name = str(userInput['name'])

        if 'description' in userInput:
            newinstrument.description = str(userInput['description'])

        if 'manufacturer' in userInput:
            newinstrument.manufacturer = str(userInput['manufacturer'])

        if 'model' in userInput:
            newinstrument.model = str(userInput['model'])

        if 'serial_num' in userInput:
            newinstrument.serial_num = str(userInput['serial_num'])

        if 'fw_version' in userInput:
            newinstrument.fw_version = str(userInput['fw_version'])

        instrument_res = yield self.irc.register_instrument_instance(newinstrument)

        yield self.reply_ok(msg, instrument_res.encode())

    @defer.inlineCallbacks
    def op_create_new_data_product(self, content, headers, msg):
        """
        Service operation: Accepts a dictionary containing user inputs.
        Updates the data product registry. Setting up an ingestion pipeline
        for the instrument is sketched below but currently commented out.
        """
        dataProductInput = content['dataProductInput']

        newdp = DataProductResource.create_new_resource()
        if 'instrumentID' in dataProductInput:
            inst_id = str(dataProductInput['instrumentID'])
            int_ref = ResourceReference(RegistryIdentity=inst_id, RegistryBranch='master')
            newdp.instrument_ref = int_ref

        if 'name' in dataProductInput:
            newdp.name = str(dataProductInput['name'])

        if 'description' in dataProductInput:
            newdp.description = str(dataProductInput['description'])

        if 'dataformat' in dataProductInput:
            newdp.dataformat = str(dataProductInput['dataformat'])

        # Step: Create a data stream
        ## Instantiate a pubsubclient
        #self.dpsc = DataPubsubClient(proc=self)
        #
        ## Create and Register a topic
        #self.topic = PubSubTopicResource.create('SBE49 Topic',"oceans, oil spill")
        #self.topic = yield self.dpsc.define_topic(self.topic)
        #logging.debug('DHE: Defined Topic')
        #
        #self.publisher = PublisherResource.create('Test Publisher', self, self.topic, 'DataObject')
        #self.publisher = yield self.dpsc.define_publisher(self.publisher)


        res = yield self.dprc.register_data_product(newdp)
        ref = res.reference(head=True)

        yield self.reply_ok(msg, res.encode())

    @defer.inlineCallbacks
    def op_execute_command(self, content, headers, msg):
        """
        Service operation: Execute a command on an instrument.
        """

        # Step 1: Extract the arguments from the UI generated message content
        commandInput = content['commandInput']

        if 'instrumentID' in commandInput:
            inst_id = str(commandInput['instrumentID'])
        else:
            raise ValueError("Input for instrumentID not present")

        command = []
        if 'command' in commandInput:
            command_op = str(commandInput['command'])
        else:
            raise ValueError("Input for command not present")

        command.append(command_op)

        arg_idx = 0
        while True:
            argname = 'cmdArg'+str(arg_idx)
            arg_idx += 1
            if argname in commandInput:
                command.append(str(commandInput[argname]))
            else:
                break

        # Step 2: Find the agent id for the given instrument id
        agent_pid  = yield self.get_agent_pid_for_instrument(inst_id)
        if not agent_pid:
            yield self.reply_err(msg, "No agent found for instrument "+str(inst_id))
            defer.returnValue(None)

        # Step 3: Interact with the agent to execute the command
        iaclient = InstrumentAgentClient(proc=self, target=agent_pid)
        commandlist = [command,]
        logging.info("Sending command to IA: "+str(commandlist))
        cmd_result = yield iaclient.execute_instrument(commandlist)

        yield self.reply_ok(msg, cmd_result)

    @defer.inlineCallbacks
    def op_get_instrument_state(self, content, headers, msg):
        """
        Service operation: Return the current state (capabilities and
        parameter values) of an instrument.
        """
        # Step 1: Extract the arguments from the UI generated message content
        commandInput = content['commandInput']

        if 'instrumentID' in commandInput:
            inst_id = str(commandInput['instrumentID'])
        else:
            raise ValueError("Input for instrumentID not present")

        agent_pid = yield self.get_agent_pid_for_instrument(inst_id)
        if not agent_pid:
            raise StandardError("No agent found for instrument "+str(inst_id))

        iaclient = InstrumentAgentClient(proc=self, target=agent_pid)
        inst_cap = yield iaclient.get_capabilities()
        if not inst_cap:
            raise StandardError("No capabilities available for instrument "+str(inst_id))

        ci_commands = inst_cap['ci_commands']
        instrument_commands = inst_cap['instrument_commands']
        instrument_parameters = inst_cap['instrument_parameters']
        ci_parameters = inst_cap['ci_parameters']

        values = yield iaclient.get_from_instrument(instrument_parameters)
        resvalues = {}
        if values:
            resvalues = values

        yield self.reply_ok(msg, resvalues)

    @defer.inlineCallbacks
    def op_start_instrument_agent(self, content, headers, msg):
        """
        Service operation: Starts an instrument agent for a type of
        instrument.
        """
        if 'instrumentID' in content:
            inst_id = str(content['instrumentID'])
        else:
            raise ValueError("Input for instrumentID not present")

        if 'model' in content:
            model = str(content['model'])
        else:
            raise ValueError("Input for model not present")

        if model != 'SBE49':
            raise ValueError("Only SBE49 supported!")

        agent_pid = yield self.get_agent_pid_for_instrument(inst_id)
        if agent_pid:
            raise StandardError("Agent already started for instrument "+str(inst_id))

        simulator = Simulator(inst_id)
        simulator.start()

        topicname = "Inst/RAW/"+inst_id
        topic = PubSubTopicResource.create(topicname,"")

        # Use the service to create a queue and register the topic
        topic = yield self.dpsc.define_topic(topic)

        iagent_args = {}
        iagent_args['instrument-id'] = inst_id
        driver_args = {}
        driver_args['port'] = simulator.port
        driver_args['publish-to'] = topic.RegistryIdentity
        iagent_args['driver-args'] = driver_args

        iapd = ProcessDesc(**{'name':'SBE49IA',
                  'module':'ion.agents.instrumentagents.SBE49_IA',
                  'class':'SBE49InstrumentAgent',
                  'spawnargs':iagent_args})

        iagent_id = yield self.spawn_child(iapd)
        iaclient = InstrumentAgentClient(proc=self, target=iagent_id)
        yield iaclient.register_resource(inst_id)

        yield self.reply_ok(msg, "OK")

    @defer.inlineCallbacks
    def op_stop_instrument_agent(self, content, headers, msg):
        """
        Service operation: Stops the instrument agent for an instrument.
        """
        yield self.reply_err(msg, "Not yet implemented")


    @defer.inlineCallbacks
    def op_start_direct_access(self, content, headers, msg):
        """
        Service operation: Starts direct access mode.
        """
        yield self.reply_err(msg, "Not yet implemented")

    @defer.inlineCallbacks
    def op_stop_direct_access(self, content, headers, msg):
        """
        Service operation: Stops direct access mode.
        """
        yield self.reply_err(msg, "Not yet implemented")

    @defer.inlineCallbacks
    def get_agent_desc_for_instrument(self, instrument_id):
        logging.info("get_agent_desc_for_instrument() instrumentID="+str(instrument_id))
        int_ref = ResourceReference(RegistryIdentity=instrument_id, RegistryBranch='master')
        agent_query = InstrumentAgentResourceInstance()
        agent_query.instrument_ref = int_ref


        # The registry lookup that populates agent_res was missing here; the
        # query below mirrors get_agent_for_instrument() and is an assumed
        # reconstruction.
        agents = yield self.arc.find_registered_agent_instance_from_description(agent_query, regex=False)
        agent_res = None
        if len(agents) > 0:
            agent_res = agents[0]
        if not agent_res:
            defer.returnValue(None)
        agent_pid = agent_res.proc_id
        logging.info("Agent process id for instrument id %s is: %s" % (instrument_id, agent_pid))
        defer.returnValue(agent_pid)

    @defer.inlineCallbacks
    def get_agent_for_instrument(self, instrument_id):
        logging.info("get_agent_for_instrument() instrumentID="+str(instrument_id))
        int_ref = ResourceReference(RegistryIdentity=instrument_id, RegistryBranch='master')
        agent_query = InstrumentAgentResourceInstance()
        agent_query.instrument_ref = int_ref
        # @todo Need to list the LC state here. WHY???
        agent_query.lifecycle = LCStates.developed
        agents = yield self.arc.find_registered_agent_instance_from_description(agent_query, regex=False)
        logging.info("Found %s agent instances for instrument id %s" % (len(agents), instrument_id))
        agent_res = None
        if len(agents) > 0:
            agent_res = agents[0]
        defer.returnValue(agent_res)

    @defer.inlineCallbacks
    def get_agent_pid_for_instrument(self, instrument_id):
        agent_res = yield self.get_agent_for_instrument(instrument_id)
        if not agent_res:
            defer.returnValue(None)
        agent_pid = agent_res.proc_id
        logging.info("Agent process id for instrument id %s is: %s" % (instrument_id, agent_pid))
        defer.returnValue(agent_pid)