class TestAgentLaunchOps(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        unittest  # reference the module to suppress a PyCharm inspector warning when all unittest.skip decorators are commented out

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS  = IdentityManagementServiceClient(node=self.container.node)
        self.PSC  = PubsubManagementServiceClient(node=self.container.node)
        self.DP   = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DSC  = DatasetManagementServiceClient(node=self.container.node)
        self.PDC  = ProcessDispatcherServiceClient(node=self.container.node)
        self.OMS  = ObservatoryManagementServiceClient(node=self.container.node)
        self.RR2  = EnhancedResourceRegistryClient(self.RR)

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return


    def test_get_agent_client_noprocess(self):
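        """
        Verify agent client lookup when no agent process is running:
        _get_agent_process_id returns None and constructing a
        ResourceAgentClient raises NotFound.
        """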
        inst_device_id = self.RR2.create(any_old(RT.InstrumentDevice))

        iap = ResourceAgentClient._get_agent_process_id(inst_device_id)

        # should be no running agent
        self.assertIsNone(iap)

        # should raise NotFound
        self.assertRaises(NotFound, ResourceAgentClient, inst_device_id)



    def test_resource_state_save_restore(self):
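        """
        Build a full SBE37 model/agent/device/agent-instance chain with parsed
        and raw data products, launch the agent, snapshot the device resource
        state, corrupt the agent instance driver_config, then restore the
        snapshot and verify the configuration comes back.
        """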

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' )
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict')
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri=DRV_URI_GOOD,
                                  stream_configurations = [raw_config, parsed_config] )
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug( 'new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241)')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345" )
        instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
                  instDevice_id)

        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)


        instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                               instAgent_id,
                                                                               instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)


        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id1 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=parsed_stream_def_id)
                                                       
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)
        self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id1)



        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug( 'Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT Dataset from the Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]



        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id2 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=raw_stream_def_id)
        log.debug( 'new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)
        self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)


        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
        gate = AgentProcessStateGate(self.PDC.read_process,
                                     instDevice_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" %
                                        gate.process_id)


        # take snapshot of config
        snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)


        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.restore_resource_state(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)

        if "BAD_DATA" == instance_obj.driver_config["comms_config"]:
            print "Saved config:"
            print snap_obj.content
            self.fail("Saved config was not properly restored")

        self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])

        
        self.DP.delete_data_product(data_product_id1)
        self.DP.delete_data_product(data_product_id2)


    def test_agent_instance_config_hasDevice(self):
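        """
        Exercise base_agent_instance_config using the hasDevice association
        (parent platform hasDevice child platform).
        """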
        def assign_fn(child_device_id, parent_device_id):
            self.RR2.create_association(parent_device_id, PRED.hasDevice, child_device_id)

        def find_fn(parent_device_id):
            ret, _ = self.RR.find_objects(subject=parent_device_id, predicate=PRED.hasDevice, id_only=True)
            return ret

        self.base_agent_instance_config(assign_fn, find_fn)
        log.info("END test_agent_instance_config_hasDevice")

    def test_agent_instance_config_hasNetworkParent(self):
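        """
        Exercise base_agent_instance_config using the hasNetworkParent
        association (child platform hasNetworkParent parent platform).
        """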
        def assign_fn(child_device_id, parent_device_id):
            self.RR2.create_association(child_device_id, PRED.hasNetworkParent, parent_device_id)

        def find_fn(parent_device_id):
            ret, _ = self.RR.find_subjects(object=parent_device_id, predicate=PRED.hasNetworkParent, id_only=True)
            return ret

        self.base_agent_instance_config(assign_fn, find_fn)
        log.info("END test_agent_instance_config_hasNetworkParent")

    def base_agent_instance_config(self, 
                                   assign_child_platform_to_parent_platform_fn, 
                                   find_child_platform_ids_of_parent_platform_fn):
        """
        Verify that agent configurations are being built properly
        """
        clients = DotDict()
        clients.resource_registry  = self.RR
        clients.pubsub_management  = self.PSC
        clients.dataset_management = self.DSC
        config_builder = DotDict()
        config_builder.i = None
        config_builder.p = None

        def refresh_pconfig_builder_hack(config_builder):
            """
            ugly hack to get around "idempotent" RR2 caching
            remove after https://github.com/ooici/coi-services/pull/1190
            """
            config_builder.p = PlatformAgentConfigurationBuilder(clients)

        def refresh_iconfig_builder_hack(config_builder):
            """
            ugly hack to get around "idempotent" RR2 caching
            remove after https://github.com/ooici/coi-services/pull/1190
            """
            config_builder.i = InstrumentAgentConfigurationBuilder(clients)


        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        org_obj = any_old(RT.Org)
        org_id = self.RR2.create(org_obj)

        inst_startup_config = {'startup': 'config'}

        generic_alerts_config = [ {'lvl2': 'lvl3val'} ]

        required_config_keys = [
            'org_governance_name',
            'device_type',
            'agent',
            'driver_config',
            'stream_config',
            'startup_config',
            'aparam_alerts_config',
            'children']



        def verify_instrument_config(config, device_id):
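            """
            Check an instrument child config: required keys, org governance
            name, device type, driver process_type, agent resource_id,
            startup/alerts/stream config, and an empty 'children' dict.
            """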
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name, config['org_governance_name'])
            self.assertEqual(RT.InstrumentDevice, config['device_type'])
            self.assertIn('driver_config', config)
            driver_config = config['driver_config']
            expected_driver_fields = {'process_type': ('ZMQPyClassDriverLauncher',)}
            for k, v in expected_driver_fields.iteritems():
                self.assertIn(k, driver_config)
                self.assertEqual(v, driver_config[k])

            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertEqual(inst_startup_config, config['startup_config'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            for key in ['children']:
                self.assertEqual({}, config[key])


        def verify_child_config(config, device_id, inst_device_id=None):
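            """
            Check a platform child config: driver_config must carry the
            {'foo': 'bar'} entry, a 'ports' entry, and the expected
            process_type; when inst_device_id is given, its config must appear
            under 'children' and is verified recursively.
            """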
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name, config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            self.assertIn('driver_config', config)
            self.assertIn('foo', config['driver_config'])
            self.assertIn('ports', config['driver_config'])
            self.assertEqual('bar', config['driver_config']['foo'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])

            if None is inst_device_id:
                for key in ['children', 'startup_config']:
                    self.assertEqual({}, config[key])
            else:
                for key in ['startup_config']:
                    self.assertEqual({}, config[key])

                self.assertIn(inst_device_id, config['children'])
                verify_instrument_config(config['children'][inst_device_id], inst_device_id)

            if config['driver_config']['ports']:
                self.assertTrue( isinstance(config['driver_config']['ports'], dict) )



        def verify_parent_config(config, parent_device_id, child_device_id, inst_device_id=None):
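            """
            Check a parent platform config and recurse into the expected child
            entry under 'children'.
            """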
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name, config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertIn('process_type', config['driver_config'])
            self.assertIn('ports', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])
            self.assertEqual({'resource_id': parent_device_id}, config['agent'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            for key in ['startup_config']:
                self.assertEqual({}, config[key])

            if config['driver_config']['ports']:
                self.assertTrue( isinstance(config['driver_config']['ports'], dict) )

            self.assertIn(child_device_id, config['children'])
            verify_child_config(config['children'][child_device_id], child_device_id, inst_device_id)






        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)
        #todo: create org and figure out which agent resource needs to get assigned to it


        def _make_platform_agent_structure(name='', agent_config=None):
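            """
            Create a platform agent instance, platform agent, and platform
            device; attach a raw data product, a platform site, and a
            deployment; wire up the associations; and return
            (instance_id, agent_id, device_id).
            """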
            if None is agent_config: agent_config = {}

            # instance creation
            platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {'driver_config': {'foo': 'bar'},
                                                                             'alerts': generic_alerts_config})
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' )
            platform_agent_obj = any_old(RT.PlatformAgent, {"stream_configurations":[raw_config]})
            platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)
            self.addCleanup(self.DP.suspend_data_product_persistence, dp_id)


            #deployment creation
            site_obj = IonObject(RT.PlatformSite, name='sitePlatform')
            site_id = self.OMS.create_platform_site(platform_site=site_obj)

            # find current deployment using time constraints
            current_time =  int( calendar.timegm(time.gmtime()) )
            # two years on either side of current time
            start = current_time - 63115200
            end = current_time + 63115200
            temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))
            platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-09-CTDMO0999',
                                                            port_type=PortTypeEnum.UPLINK,
                                                            ip_address=0)
            deployment_obj = IonObject(RT.Deployment,
                                       name='TestPlatformDeployment_' + name,
                                       description='some new deployment',
                                       context=IonObject(OT.CabledNodeDeploymentContext),
                                       constraint_list=[temporal_bounds],
                                       port_assignments={platform_device_id:platform_port_obj})

            deploy_id = self.OMS.create_deployment(deployment=deployment_obj, site_id=site_id, device_id=platform_device_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, org_id)

            return platform_agent_instance_id, platform_agent_id, platform_device_id


        def _make_instrument_agent_structure(agent_config=None):
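            """
            Create an instrument agent instance, instrument agent, and
            instrument device; attach a raw data product, an instrument site,
            and a deployment; wire up the associations; and return
            (instance_id, agent_id, device_id).
            """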
            if None is agent_config: agent_config = {}

            # instance creation
            instrument_agent_instance_obj = any_old(RT.InstrumentAgentInstance, {"startup_config": inst_startup_config,
                                                                                 'alerts': generic_alerts_config})
            instrument_agent_instance_obj.agent_config = agent_config
            instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw',
                                             parameter_dictionary_name='ctd_raw_param_dict' )
            instrument_agent_obj = any_old(RT.InstrumentAgent, {"stream_configurations":[raw_config]})
            instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj)

            # device creation
            instrument_device_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)
            self.addCleanup(self.DP.suspend_data_product_persistence, dp_id)

            #deployment creation
            site_obj = IonObject(RT.InstrumentSite, name='siteInstrument')
            site_id = self.OMS.create_instrument_site(instrument_site =site_obj)

            # find current deployment using time constraints
            current_time =  int( calendar.timegm(time.gmtime()) )
            # two years on either side of current time
            start = current_time - 63115200
            end = current_time + 63115200
            temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))
            platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-08-CTDMO0888',
                                                            port_type=PortTypeEnum.PAYLOAD,
                                                            ip_address=0)
            deployment_obj = IonObject(RT.Deployment,
                                       name='TestDeployment for Cabled Instrument',
                                       description='some new deployment',
                                       context=IonObject(OT.CabledInstrumentDeploymentContext),
                                       constraint_list=[temporal_bounds],
                                       port_assignments={instrument_device_id:platform_port_obj})

            deploy_id = self.OMS.create_deployment(deployment=deployment_obj, site_id=site_id, device_id=instrument_device_id)


            # assignments
            self.RR2.assign_instrument_agent_instance_to_instrument_device_with_has_agent_instance(instrument_agent_instance_id, instrument_device_id)
            self.RR2.assign_instrument_agent_to_instrument_agent_instance_with_has_agent_definition(instrument_agent_id, instrument_agent_instance_id)
            self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id)

            return instrument_agent_instance_id, instrument_agent_id, instrument_device_id



        # can't do anything without an agent instance obj
        log.debug("Testing that preparing a launcher without agent instance raises an error")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        self.assertRaises(AssertionError, config_builder.p.prepare, will_launch=False)

        log.debug("Making the structure for a platform agent, which will be the child")
        platform_agent_instance_child_id, _, platform_device_child_id  = _make_platform_agent_structure(name='child')
        platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id)

        log.debug("Preparing a valid agent instance launch, for config only")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(platform_agent_instance_child_obj)
        child_config = config_builder.p.prepare(will_launch=False)
        verify_child_config(child_config, platform_device_child_id)


        log.debug("Making the structure for a platform agent, which will be the parent")
        platform_agent_instance_parent_id, _, platform_device_parent_id  = _make_platform_agent_structure(name='parent')
        platform_agent_instance_parent_obj = self.RR2.read(platform_agent_instance_parent_id)

        log.debug("Testing child-less parent as a child config")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = config_builder.p.prepare(will_launch=False)
        verify_child_config(parent_config, platform_device_parent_id)

        log.debug("assigning child platform to parent")
        assign_child_platform_to_parent_platform_fn(platform_device_child_id, platform_device_parent_id)

        child_device_ids = find_child_platform_ids_of_parent_platform_fn(platform_device_parent_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing parent + child as parent config")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = config_builder.p.prepare(will_launch=False)
        verify_parent_config(parent_config, platform_device_parent_id, platform_device_child_id)


        log.debug("making the structure for an instrument agent")
        instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure()
        instrument_agent_instance_obj = self.RR2.read(instrument_agent_instance_id)

        log.debug("Testing instrument config")
        refresh_iconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.i.set_agent_instance_object(instrument_agent_instance_obj)
        instrument_config = config_builder.i.prepare(will_launch=False)
        verify_instrument_config(instrument_config, instrument_device_id)

        log.debug("assigning instrument to platform")
        self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id, platform_device_child_id)

        child_device_ids = self.RR2.find_instrument_device_ids_of_platform_device_using_has_device(platform_device_child_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing entire config")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj)
        full_config = config_builder.p.prepare(will_launch=False)
        verify_parent_config(full_config, platform_device_parent_id, platform_device_child_id, instrument_device_id)

        #self.fail(parent_config)
        #plauncher.prepare(will_launch=False)
        log.info("END base_agent_instance_config")


class BaseIntTestPlatform(IonIntegrationTestCase, HelperTestMixin):
    """
    A base class with several conveniences supporting specific platform agent
    integration tests, see:
    - ion/agents/platform/test/test_platform_agent_with_rsn.py
    - ion/services/sa/observatory/test/test_platform_launch.py

    The platform IDs used here are organized as follows:
      Node1D -> MJ01C -> LJ01D

    where -> goes from parent platform to child platform.

    This is a subset of the whole topology defined in the simulated platform
    network (network.yml), which in turn is used by the RSN OMS simulator.

    - 'LJ01D'  is the root platform used in test_single_platform
    - 'Node1D' is the root platform used in test_hierarchy

    Methods are provided to construct specific platform topologies, but
    subclasses decide which to use.
    """

    @classmethod
    def setUpClass(cls):
        HelperTestMixin.setUpClass()

    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DP   = DataProductManagementServiceClient(node=self.container.node)
        self.PSC  = PubsubManagementServiceClient(node=self.container.node)
        self.PDC  = ProcessDispatcherServiceClient(node=self.container.node)
        self.DSC  = DatasetManagementServiceClient()
        self.IDS  = IdentityManagementServiceClient(node=self.container.node)
        self.RR2  = EnhancedResourceRegistryClient(self.RR)

        self.org_id = self.RR2.create(any_old(RT.Org))
        log.debug("Org created: %s", self.org_id)

        # Create InstrumentModel
        # TODO create multiple models as needed; for the moment assuming all
        # used instruments are the same model here.
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        self.instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', self.instModel_id)

        # Use the network definition provided by RSN OMS directly.
        rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
        self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
        CIOMSClientFactory.destroy_instance(rsn_oms)

        if log.isEnabledFor(logging.TRACE):
            # show serialized version for the network definition:
            network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
            log.trace("NetworkDefinition serialization:\n%s", network_definition_ser)

        # set attributes for the platforms:
        self._platform_attributes = {}
        for platform_id in self._network_definition.pnodes:
            pnode = self._network_definition.pnodes[platform_id]
            dic = dict((attr.attr_id, attr.defn) for attr in pnode.attrs.itervalues())
            self._platform_attributes[platform_id] = dic
        log.trace("_platform_attributes: %s", self._platform_attributes)

        # set ports for the platforms:
        self._platform_ports = {}
        for platform_id in self._network_definition.pnodes:
            pnode = self._network_definition.pnodes[platform_id]
            dic = {}
            for port_id, port in pnode.ports.iteritems():
                dic[port_id] = dict(port_id=port_id,
                                    network=port.network)
            self._platform_ports[platform_id] = dic
        log.trace("_platform_ports: %s", self._platform_attributes)

        self._async_data_result = AsyncResult()
        self._data_subscribers = []
        self._samples_received = []
        self.addCleanup(self._stop_data_subscribers)

        self._async_event_result = AsyncResult()
        self._event_subscribers = []
        self._events_received = []
        self.addCleanup(self._stop_event_subscribers)
        self._start_event_subscriber(sub_type="platform_event")

        # instruments that have been set up: instr_key: i_obj
        self._setup_instruments = {}

    #################################################################
    # data subscribers handling
    #################################################################

    def _start_data_subscriber(self, stream_name, stream_id):
        """
        Starts a data subscriber for the given stream_name and stream_id.
        """

        def consume_data(message, stream_route, stream_id):
            # A callback for processing subscribed-to data.
            log.info('Subscriber received data message: %s. stream_name=%r stream_id=%r',
                     str(message), stream_name, stream_id)
            self._samples_received.append(message)
            self._async_data_result.set()

        log.info('_start_data_subscriber stream_name=%r stream_id=%r',
                 stream_name, stream_id)

        # Create subscription for the stream
        exchange_name = '%s_queue' % stream_name
        self.container.ex_manager.create_xn_queue(exchange_name).purge()
        sub = StandaloneStreamSubscriber(exchange_name, consume_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = self.PSC.create_subscription(name=exchange_name, stream_ids=[stream_id])
        self.PSC.activate_subscription(sub_id)
        sub.subscription_id = sub_id

    def _stop_data_subscribers(self):
        """
        Stop the data subscribers on cleanup.
        """
        try:
            for sub in self._data_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        self.PSC.deactivate_subscription(sub.subscription_id)
                    except:
                        pass
                    self.PSC.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._data_subscribers = []

    #################################################################
    # event subscribers handling
    #################################################################

    def _start_event_subscriber(self, event_type="DeviceEvent",
                                sub_type=None,
                                count=0):
        """
        Starts an event subscriber for events of the given event_type
        ("DeviceEvent" by default) and sub_type (None by default).
        """

        def consume_event(evt, *args, **kwargs):
            # A callback for consuming events.
            log.info('Event subscriber received evt: %s.', str(evt))
            self._events_received.append(evt)
            if count == 0:
                self._async_event_result.set(evt)

            elif count == len(self._events_received):
                self._async_event_result.set()

        sub = EventSubscriber(event_type=event_type,
                              sub_type=sub_type,
                              callback=consume_event)

        sub.start()
        log.info("registered event subscriber for event_type=%r, sub_type=%r, count=%d",
                 event_type, sub_type, count)

        self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)

    def _stop_event_subscribers(self):
        """
        Stops the event subscribers on cleanup.
        """
        try:
            for sub in self._event_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        self.PSC.deactivate_subscription(sub.subscription_id)
                    except:
                        pass
                    self.PSC.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._event_subscribers = []

    #################################################################
    # config supporting methods
    #################################################################

    def _get_platform_stream_configs(self):
        """
        This method is an adaptation of get_streamConfigs in
        test_driver_egg.py
        """
        return [
            StreamConfiguration(stream_name='parsed',
                                parameter_dictionary_name='platform_eng_parsed',
                                records_per_granule=2,
                                granule_publish_rate=5)

            # TODO include a "raw" stream?
        ]

    def _get_instrument_stream_configs(self):
        """
        configs copied from test_activate_instrument.py
        """
        return [
            StreamConfiguration(stream_name='raw',
                                parameter_dictionary_name='ctd_raw_param_dict',
                                records_per_granule=2,
                                granule_publish_rate=5),

            StreamConfiguration(stream_name='parsed',
                                parameter_dictionary_name='ctd_parsed_param_dict',
                                records_per_granule=2, granule_publish_rate=5)
        ]

    def _verify_child_config(self, config, device_id, is_platform):
        for key in required_config_keys:
            self.assertIn(key, config)

        if is_platform:
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            for key in DVR_CONFIG.iterkeys():
                self.assertIn(key, config['driver_config'])

            for key in ['startup_config']:
                self.assertEqual({}, config[key])
        else:
            self.assertEqual(RT.InstrumentDevice, config['device_type'])

            for key in ['children']:
                self.assertEqual({}, config[key])

        self.assertEqual({'resource_id': device_id}, config['agent'])
        self.assertIn('stream_config', config)

    def _verify_parent_config(self, config, parent_device_id,
                              child_device_id, is_platform):
        for key in required_config_keys:
            self.assertIn(key, config)
        self.assertEqual(RT.PlatformDevice, config['device_type'])
        for key in DVR_CONFIG.iterkeys():
            self.assertIn(key, config['driver_config'])
        self.assertEqual({'resource_id': parent_device_id}, config['agent'])
        self.assertIn('stream_config', config)
        for key in ['startup_config']:
            self.assertEqual({}, config[key])

        self.assertIn(child_device_id, config['children'])
        self._verify_child_config(config['children'][child_device_id],
                                  child_device_id, is_platform)

    def _create_platform_configuration(self, platform_id, parent_platform_id=None):
        """
        This method is an adaptation of test_agent_instance_config in
        test_instrument_management_service_integration.py

        @param platform_id
        @param parent_platform_id
        @return a DotDict with the various constructed elements associated
                with the platform.
        """

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        #
        # TODO will each platform have its own param dictionary?
        #
        param_dict_name = 'platform_eng_parsed'
        parsed_rpdict_id = self.DSC.read_parameter_dictionary_by_name(
            param_dict_name,
            id_only=True)
        self.parsed_stream_def_id = self.PSC.create_stream_definition(
            name='parsed',
            parameter_dictionary_id=parsed_rpdict_id)

        def _make_platform_agent_structure(agent_config=None):
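            """
            Create a platform agent instance, platform agent, and platform
            device using the RSN driver config for this platform_id, attach a
            parsed data product, wire up the associations, and return
            (instance_id, agent_id, device_id, stream_id).
            """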
            if None is agent_config: agent_config = {}

            driver_config = copy.deepcopy(DVR_CONFIG)
            driver_config['attributes'] = self._platform_attributes[platform_id]
            driver_config['ports']      = self._platform_ports[platform_id]
            log.debug("driver_config: %s", driver_config)

            # instance creation
            platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {
                'driver_config': driver_config})
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj)

            # agent creation
            platform_agent_obj = any_old(RT.PlatformAgent, {
                "stream_configurations": self._get_platform_stream_configs(),
                'driver_module':         DVR_MOD,
                'driver_class':          DVR_CLS})
            platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=self.parsed_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)
            self.addCleanup(self.DP.delete_data_product, dp_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, self.org_id)

            #######################################
            # dataset

            log.debug('data product = %s', dp_id)

            stream_ids, _ = self.RR.find_objects(dp_id, PRED.hasStream, None, True)
            log.debug('Data product stream_ids = %s', stream_ids)
            stream_id = stream_ids[0]

            # Retrieve the id of the OUTPUT Dataset from the Data Product
            dataset_ids, _ = self.RR.find_objects(dp_id, PRED.hasDataset, RT.Dataset, True)
            log.debug('Data set for dp_id = %s', dataset_ids[0])
            #######################################

            return platform_agent_instance_id, platform_agent_id, platform_device_id, stream_id

        log.debug("Making the structure for a platform agent")

        # TODO Note: the 'platform_config' entry is the mechanism by which the
        # platform agent learns its platform_id and parent_platform_id.
        # Determine how this info should ultimately be conveyed.
        platform_config = {
            'platform_id':             platform_id,
            'parent_platform_id':      parent_platform_id,
        }

        child_agent_config = {
            'platform_config': platform_config
        }
        platform_agent_instance_child_id, _, platform_device_child_id, stream_id = \
            _make_platform_agent_structure(child_agent_config)

        platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id)

        self.platform_device_parent_id = platform_device_child_id

        p_obj = DotDict()
        p_obj.platform_id = platform_id
        p_obj.parent_platform_id = parent_platform_id
        p_obj.platform_agent_instance_obj = platform_agent_instance_child_obj
        p_obj.platform_device_id = platform_device_child_id
        p_obj.platform_agent_instance_id = platform_agent_instance_child_id
        p_obj.stream_id = stream_id
        p_obj.pid = None  # known when process launched
        return p_obj

    def _create_platform(self, platform_id, parent_platform_id=None):
        """
        The main method to create a platform configuration and do other
        preparations for a given platform.
        """
        p_obj = self._create_platform_configuration(platform_id, parent_platform_id)

        # start corresponding data subscriber:
        self._start_data_subscriber(p_obj.platform_agent_instance_id,
                                    p_obj.stream_id)

        return p_obj

    #################################################################
    # platform child-parent linking
    #################################################################

    def _assign_child_to_parent(self, p_child, p_parent):

        log.debug("assigning child platform %r to parent %r",
                  p_child.platform_id, p_parent.platform_id)

        self.RR2.assign_platform_device_to_platform_device_with_has_device(p_child.platform_device_id,
                                                                           p_parent.platform_device_id)
        child_device_ids = self.RR2.find_platform_device_ids_of_device_using_has_device(p_parent.platform_device_id)
        self.assertNotEqual(0, len(child_device_ids))

    #################################################################
    # instrument
    #################################################################

    def _set_up_pre_environment_for_instrument(self, instr_info):
        """
        Based on test_instrument_agent.py

        Basically, this method launches a port agent and then completes the
        instrument driver configuration used to properly set up a particular
        instrument agent.

        @param instr_info  A value in instruments_dict
        @return instrument_driver_config
        """

        import sys
        from ion.agents.instrument.driver_process import DriverProcessType
        from ion.agents.instrument.driver_process import ZMQEggDriverProcess

        # A seabird driver.
        DRV_URI = SBE37_EGG
        DRV_MOD = 'mi.instrument.seabird.sbe37smb.ooicore.driver'
        DRV_CLS = 'SBE37Driver'

        WORK_DIR = '/tmp/'
        DELIM = ['<<', '>>']

        instrument_driver_config = {
            'dvr_egg' : DRV_URI,
            'dvr_mod' : DRV_MOD,
            'dvr_cls' : DRV_CLS,
            'workdir' : WORK_DIR,
            'process_type' : None
        }

        # Launch from egg or a local MI repo.
        LAUNCH_FROM_EGG=True

        if LAUNCH_FROM_EGG:
            # Dynamically load the egg into the test path
            launcher = ZMQEggDriverProcess(instrument_driver_config)
            egg = launcher._get_egg(DRV_URI)
            if egg not in sys.path: sys.path.insert(0, egg)
            instrument_driver_config['process_type'] = (DriverProcessType.EGG,)

        else:
            mi_repo = os.getcwd() + os.sep + 'extern' + os.sep + 'mi_repo'
            if mi_repo not in sys.path: sys.path.insert(0, mi_repo)
            instrument_driver_config['process_type'] = (DriverProcessType.PYTHON_MODULE,)
            instrument_driver_config['mi_repo'] = mi_repo

        DEV_ADDR  = instr_info['DEV_ADDR']
        DEV_PORT  = instr_info['DEV_PORT']
        DATA_PORT = instr_info['DATA_PORT']
        CMD_PORT  = instr_info['CMD_PORT']
        PA_BINARY = instr_info['PA_BINARY']

        support = DriverIntegrationTestSupport(None,
                                               None,
                                               DEV_ADDR,
                                               DEV_PORT,
                                               DATA_PORT,
                                               CMD_PORT,
                                               PA_BINARY,
                                               DELIM,
                                               WORK_DIR)

        # Start port agent, add stop to cleanup.
        port = support.start_pagent()
        log.info('Port agent started at port %i', port)
        self.addCleanup(support.stop_pagent)

        # Configure instrument driver to use port agent port number.
        instrument_driver_config['comms_config'] = {
            'addr':     'localhost',
            'port':     port,
            'cmd_port': CMD_PORT
        }

        return instrument_driver_config

    def _make_instrument_agent_structure(self, instr_key, org_obj, agent_config=None):
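        """
        Create the instrument agent, device, and agent instance resources for
        the given instrument key, including port agent setup, driver config,
        alert definitions, and parsed/raw data products; returns a DotDict
        with the created resource IDs.
        """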
        if None is agent_config: agent_config = {}

        instr_info = instruments_dict[instr_key]

        # initially adapted from test_activate_instrument:test_activateInstrumentSample

        # agent creation
        instrument_agent_obj = IonObject(RT.InstrumentAgent,
                                         name='agent007_%s' % instr_key,
                                         description="SBE37IMAgent_%s" % instr_key,
                                         driver_uri=SBE37_EGG,
                                         stream_configurations=self._get_instrument_stream_configs())

        instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj)
        log.debug('new InstrumentAgent id = %s', instrument_agent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(self.instModel_id, instrument_agent_id)

        # device creation
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice_%s' % instr_key,
                                   description="SBE37IMDevice_%s" % instr_key,
                                   serial_number="12345")
        instrument_device_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(self.instModel_id, instrument_device_id)
        log.debug("new InstrumentDevice id = %s ", instrument_device_id)

        # Create stream alerts
        temp_alert_def = {
            'name' : 'temperature_warning_interval',
            'stream_name' : 'parsed',
            'message' : 'Temperature is below the normal range of 50.0 and above.',
            'alert_type' : StreamAlertType.WARNING,
            'aggregate_type' : AggregateStatusType.AGGREGATE_DATA,
            'value_id' : 'temp',
            'lower_bound' : 50.0,
            'lower_rel_op' : '<',
            'alert_class' : 'IntervalAlert'
        }

        late_data_alert_def = {
            'name' : 'late_data_warning',
            'stream_name' : 'parsed',
            'message' : 'Expected data has not arrived.',
            'alert_type' : StreamAlertType.WARNING,
            'aggregate_type' : AggregateStatusType.AGGREGATE_COMMS,
            'value_id' : None,
            'time_delta' : 2,
            'alert_class' : 'LateDataAlert'
        }

        instrument_driver_config = self._set_up_pre_environment_for_instrument(instr_info)

        port_agent_config = {
            'device_addr':     instr_info['DEV_ADDR'],
            'device_port':     instr_info['DEV_PORT'],
            'data_port':       instr_info['DATA_PORT'],
            'command_port':    instr_info['CMD_PORT'],
            'binary_path':     instr_info['PA_BINARY'],
            'process_type':    PortAgentProcessType.UNIX,
            'port_agent_addr': 'localhost',
            'log_level':       5,
            'type':            PortAgentType.ETHERNET
        }

        # instance creation
        instrument_agent_instance_obj = IonObject(RT.InstrumentAgentInstance,
                                                  name='SBE37IMAgentInstance_%s' % instr_key,
                                                  description="SBE37IMAgentInstance_%s" % instr_key,
                                                  driver_config=instrument_driver_config,
                                                  port_agent_config=port_agent_config,
                                                  alerts=[temp_alert_def, late_data_alert_def])

        instrument_agent_instance_obj.agent_config = agent_config

        instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj)

        # data products

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        org_id = self.RR2.create(org_obj)

        # parsed:

        parsed_pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(
            name='ctd_parsed',
            parameter_dictionary_id=parsed_pdict_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data for %s' % instr_key,
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=parsed_stream_def_id)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)
        self.addCleanup(self.DP.delete_data_product, data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id,
                                      data_product_id=data_product_id1)

        # raw:

        raw_pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(
            name='ctd_raw',
            parameter_dictionary_id=raw_pdict_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data for %s' % instr_key,
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id2 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=raw_stream_def_id)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)
        self.addCleanup(self.DP.delete_data_product, data_product_id2)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id,
                                      data_product_id=data_product_id2)

        # assignments
        self.RR2.assign_instrument_agent_instance_to_instrument_device_with_has_agent_instance(instrument_agent_instance_id, instrument_device_id)
        self.RR2.assign_instrument_agent_to_instrument_agent_instance_with_has_agent_definition(instrument_agent_id, instrument_agent_instance_id)
        self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id)

        i_obj = DotDict()
        i_obj.instrument_agent_id = instrument_agent_id
        i_obj.instrument_device_id = instrument_device_id
        i_obj.instrument_agent_instance_id = instrument_agent_instance_id
        i_obj.org_obj = org_obj

        log.debug("KK CREATED I_obj: %s", i_obj)

        return i_obj

    def _create_instrument(self, instr_key):
        """
        The main method to create an instrument configuration.

        @param instr_key  A key in instruments_dict
        @return the i_obj DotDict constructed for the instrument
        """

        self.assertIn(instr_key, instruments_dict)
        self.assertNotIn(instr_key, self._setup_instruments)

        instr_info = instruments_dict[instr_key]

        log.debug("_create_instrument: creating instrument %r: %s",
                  instr_key, instr_info)

        org_obj = any_old(RT.Org)

        log.debug("making the structure for an instrument agent")
        i_obj = self._make_instrument_agent_structure(instr_key, org_obj)

        self._setup_instruments[instr_key] = i_obj

        log.debug("_create_instrument: created instrument %r", instr_key)

        return i_obj

    def _get_instrument(self, instr_key):
        """
        Gets the i_obj constructed by _create_instrument(instr_key).
        """
        self.assertIn(instr_key, self._setup_instruments)
        i_obj = self._setup_instruments[instr_key]
        return i_obj

    #################################################################
    # instrument-platform linking
    #################################################################

    def _assign_instrument_to_platform(self, i_obj, p_obj):

        log.debug("assigning instrument %r to platform %r",
                  i_obj.instrument_agent_instance_id, p_obj.platform_id)

        self.RR2.assign_instrument_device_to_platform_device_with_has_device(
            i_obj.instrument_device_id,
            p_obj.platform_device_id)

        child_device_ids = self.RR2.find_instrument_device_ids_of_device_using_has_device(p_obj.platform_device_id)
        self.assertNotEqual(0, len(child_device_ids))

    #################################################################
    # some platform topologies
    #################################################################

    def _create_single_platform(self):
        """
        Creates and prepares a platform corresponding to the
        platform ID 'LJ01D', which is a leaf in the simulated network.
        """
        p_root = self._create_platform('LJ01D')
        return p_root

    def _create_small_hierarchy(self):
        """
        Creates a small platform network consisting of 3 platforms as follows:
          Node1D -> MJ01C -> LJ01D
        where -> goes from parent to child.
        """

        p_root       = self._create_platform('Node1D')
        p_child      = self._create_platform('MJ01C', parent_platform_id='Node1D')
        p_grandchild = self._create_platform('LJ01D', parent_platform_id='MJ01C')

        self._assign_child_to_parent(p_child, p_root)
        self._assign_child_to_parent(p_grandchild, p_child)

        return p_root

    def _create_hierarchy(self, platform_id, p_objs, parent_obj=None):
        """
        Creates a hierarchy of platforms rooted at the given platform.

        @param platform_id  ID of the root platform at this level
        @param p_objs       dict to be updated with (platform_id: p_obj)
                            mappings
        @param parent_obj   platform object of the parent, if any

        @return platform object for the created root.
        """

        # create the object to be returned:
        p_obj = self._create_platform(platform_id)

        # update (platform_id: p_obj) dict:
        p_objs[platform_id] = p_obj

        # recursively create child platforms:
        pnode = self._network_definition.pnodes[platform_id]
        for sub_platform_id in pnode.subplatforms:
            self._create_hierarchy(sub_platform_id, p_objs, p_obj)

        if parent_obj:
            self._assign_child_to_parent(p_obj, parent_obj)

        return p_obj

    def _set_up_single_platform_with_some_instruments(self, instr_keys):
        """
        Sets up a single platform with some instruments

        @param instr_keys  Keys of the instruments to be assigned.
                           Must be keys in instruments_dict in
                           base_test_platform_agent_with_rsn

        @return p_root for subsequent termination
        """

        for instr_key in instr_keys:
            self.assertIn(instr_key, instruments_dict)

        p_root = self._create_single_platform()

        # create and assign instruments:
        for instr_key in instr_keys:
            i_obj = self._create_instrument(instr_key)
            self._assign_instrument_to_platform(i_obj, p_root)

        return p_root

    def _set_up_platform_hierarchy_with_some_instruments(self, instr_keys):
        """
        Sets up a multiple-level platform hierarchy with instruments associated
        with some of the platforms.

        The platform hierarchy corresponds to the sub-network in the
        simulated topology rooted at 'Node1B', which at time of writing
        looks like this:

        Node1B
            Node1C
                Node1D
                    MJ01C
                        LJ01D
                LV01C
                    PC01B
                        SC01B
                            SF01B
                    LJ01C
            LV01B
                LJ01B
                MJ01B

        In DEBUG logging level for the platform agent, files like the following
        are generated under logs/:
           platform_CFG_received_Node1B.txt
           platform_CFG_received_MJ01C.txt
           platform_CFG_received_LJ01D.txt

        @param instr_keys  Keys of the instruments to be assigned.
                           Must be keys in instruments_dict in
                           base_test_platform_agent_with_rsn

        @return p_root for subsequent termination
        """

        for instr_key in instr_keys:
            self.assertIn(instr_key, instruments_dict)

        #####################################
        # create platform hierarchy
        #####################################
        log.info("will create platform hierarchy ...")
        start_time = time.time()

        root_platform_id = 'Node1B'
        p_objs = {}
        p_root = self._create_hierarchy(root_platform_id, p_objs)

        log.info("platform hierarchy built. Took %.3f secs. "
                  "Root platform=%r, number of platforms=%d: %s",
                  time.time() - start_time,
                  root_platform_id, len(p_objs), p_objs.keys())

        self.assertIn(root_platform_id, p_objs)
        self.assertEquals(13, len(p_objs))

        #####################################
        # create the indicated instruments
        #####################################
        log.info("will create %d instruments: %s", len(instr_keys), instr_keys)
        start_time = time.time()

        i_objs = []
        for instr_key in instr_keys:
            i_obj = self._create_instrument(instr_key)
            i_objs.append(i_obj)
            log.debug("instrument created = %r (%s)",
                      i_obj.instrument_agent_instance_id, instr_key)

        log.info("%d instruments created. Took %.3f secs.", len(instr_keys), time.time() - start_time)

        #####################################
        # assign the instruments
        #####################################
        log.info("will assign instruments ...")
        start_time = time.time()

        plats_to_assign_instrs = [
            'LJ01D', 'SF01B', 'LJ01B', 'MJ01B',     # leaves
            'MJ01C', 'Node1D', 'LV01B', 'Node1C'    # intermediate
        ]

        # assign one available instrument to a platform;
        # the assignments are arbitrary.
        num_assigns = min(len(instr_keys), len(plats_to_assign_instrs))

        for ii in range(num_assigns):
            platform_id = plats_to_assign_instrs[ii]
            self.assertIn(platform_id, p_objs)
            p_obj = p_objs[platform_id]
            i_obj = i_objs[ii]
            self._assign_instrument_to_platform(i_obj, p_obj)
            log.debug("instrument %r (%s) assigned to platform %r",
                      i_obj.instrument_agent_instance_id,
                      instr_keys[ii],
                      platform_id)

        log.info("%d instruments assigned. Took %.3f secs.",
                 num_assigns, time.time() - start_time)

        return p_root

    #################################################################
    # start / stop platform
    #################################################################

    def _start_platform(self, p_obj):
        agent_instance_id = p_obj.platform_agent_instance_id
        log.debug("about to call start_platform_agent_instance with id=%s", agent_instance_id)
        p_obj.pid = self.IMS.start_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", p_obj.pid)

        #wait for start
        agent_instance_obj = self.IMS.read_platform_agent_instance(agent_instance_id)
        gate = ProcessStateGate(self.PDC.read_process,
                                agent_instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(90), "The platform agent instance did not spawn in 90 seconds")

        # Start a resource agent client to talk with the agent.
        self._pa_client = ResourceAgentClient('paclient',
                                              name=agent_instance_obj.agent_process_id,
                                              process=FakeProcess())
        log.debug("got platform agent client %s", str(self._pa_client))

    def _stop_platform(self, p_obj):
        try:
            self.IMS.stop_platform_agent_instance(p_obj.platform_agent_instance_id)
        except Exception:
            if log.isEnabledFor(logging.TRACE):
                log.exception(
                    "platform_id=%r: Exception in IMS.stop_platform_agent_instance with "
                    "platform_agent_instance_id = %r",
                    p_obj.platform_id, p_obj.platform_agent_instance_id)
            else:
                log.warn(
                    "platform_id=%r: Exception in IMS.stop_platform_agent_instance with "
                    "platform_agent_instance_id = %r. Perhaps already dead.",
                    p_obj.platform_id, p_obj.platform_agent_instance_id)

    #################################################################
    # misc convenience methods
    #################################################################

    def _create_resource_agent_client(self, resource_id):
        client = ResourceAgentClient(resource_id, process=FakeProcess())
        return client

    def _get_state(self):
        state = self._pa_client.get_agent_state()
        return state

    def _assert_state(self, state):
        self.assertEquals(self._get_state(), state)

    def _execute_agent(self, cmd):
        log.info("_execute_agent: cmd=%r kwargs=%r ...", cmd.command, cmd.kwargs)
        time_start = time.time()
        #retval = self._pa_client.execute_agent(cmd, timeout=timeout)
        retval = self._pa_client.execute_agent(cmd)
        elapsed_time = time.time() - time_start
        log.info("_execute_agent: cmd=%r elapsed_time=%s, retval = %s",
                 cmd.command, elapsed_time, str(retval))
        return retval

    #################################################################
    # commands that concrete tests can call
    #################################################################
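    # Typical agent lifecycle exercised via these helpers (per the state
    # asserts below):
    #   UNINITIALIZED -initialize-> INACTIVE -go_active-> IDLE -run-> COMMAND
    #   COMMAND -start_resource_monitoring-> MONITORING -stop_resource_monitoring-> COMMAND
    #   COMMAND -pause-> STOPPED -resume-> COMMAND
    #   ... -clear-> IDLE, -go_inactive-> INACTIVE, -reset-> UNINITIALIZED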

    def _ping_agent(self):
        retval = self._pa_client.ping_agent()
        self.assertIsInstance(retval, str)

    def _ping_resource(self):
        cmd = AgentCommand(command=PlatformAgentEvent.PING_RESOURCE)
        if self._get_state() == PlatformAgentState.UNINITIALIZED:
            # should get ServerError: "Command not handled in current state"
            with self.assertRaises(ServerError):
                self._pa_client.execute_agent(cmd)
        else:
            # In all other states the command should be accepted:
            retval = self._execute_agent(cmd)
            self.assertEquals("PONG", retval.result)

    def _get_metadata(self):
        cmd = AgentCommand(command=PlatformAgentEvent.GET_METADATA)
        retval = self._execute_agent(cmd)
        md = retval.result
        self.assertIsInstance(md, dict)
        # TODO verify possible subset of required entries in the dict.
        log.info("GET_METADATA = %s", md)

    def _get_ports(self):
        cmd = AgentCommand(command=PlatformAgentEvent.GET_PORTS)
        retval = self._execute_agent(cmd)
        md = retval.result
        self.assertIsInstance(md, dict)
        # TODO verify possible subset of required entries in the dict.
        log.info("GET_PORTS = %s", md)

    def _initialize(self):
        self._assert_state(PlatformAgentState.UNINITIALIZED)
        cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.INACTIVE)

    def _go_active(self):
        cmd = AgentCommand(command=PlatformAgentEvent.GO_ACTIVE)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.IDLE)

    def _run(self):
        cmd = AgentCommand(command=PlatformAgentEvent.RUN)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.COMMAND)

    def _start_resource_monitoring(self):
        cmd = AgentCommand(command=PlatformAgentEvent.START_MONITORING)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.MONITORING)

    def _wait_for_a_data_sample(self):
        log.info("waiting for reception of a data sample...")
        # just wait for at least one -- see consume_data
        self._async_data_result.get(timeout=DATA_TIMEOUT)
        self.assertTrue(len(self._samples_received) >= 1)
        log.info("Received samples: %s", len(self._samples_received))

    def _wait_for_external_event(self):
        log.info("waiting for reception of an external event...")
        # just wait for at least one -- see consume_event
        self._async_event_result.get(timeout=EVENT_TIMEOUT)
        self.assertTrue(len(self._events_received) >= 1)
        log.info("Received events: %s", len(self._events_received))

    def _stop_resource_monitoring(self):
        cmd = AgentCommand(command=PlatformAgentEvent.STOP_MONITORING)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.COMMAND)

    def _pause(self):
        cmd = AgentCommand(command=PlatformAgentEvent.PAUSE)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.STOPPED)

    def _resume(self):
        cmd = AgentCommand(command=PlatformAgentEvent.RESUME)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.COMMAND)

    def _clear(self):
        cmd = AgentCommand(command=PlatformAgentEvent.CLEAR)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.IDLE)

    def _go_inactive(self):
        cmd = AgentCommand(command=PlatformAgentEvent.GO_INACTIVE)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.INACTIVE)

    def _reset(self):
        cmd = AgentCommand(command=PlatformAgentEvent.RESET)
        retval = self._execute_agent(cmd)
        self._assert_state(PlatformAgentState.UNINITIALIZED)

    def _check_sync(self):
        cmd = AgentCommand(command=PlatformAgentEvent.CHECK_SYNC)
        retval = self._execute_agent(cmd)
        log.info("CHECK_SYNC result: %s", retval.result)
        self.assertTrue(retval.result is not None)
        self.assertEquals(retval.result[0:3], "OK:")
        return retval.result

    def _stream_instruments(self):
        from mi.instrument.seabird.sbe37smb.ooicore.driver import SBE37ProtocolEvent
        from mi.instrument.seabird.sbe37smb.ooicore.driver import SBE37Parameter

        for instrument in self._setup_instruments.itervalues():
            # instruments that have been set up: instr_key: i_obj

            # Start a resource agent client to talk with the instrument agent.
            _ia_client = self._create_resource_agent_client(instrument.instrument_device_id)

            cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
            retval = _ia_client.execute_resource(cmd)
            log.debug('_stream_instruments retval: %s', retval)

        return

    def _idle_instruments(self):
        from mi.instrument.seabird.sbe37smb.ooicore.driver import SBE37ProtocolEvent
        from mi.instrument.seabird.sbe37smb.ooicore.driver import SBE37Parameter

        for instrument in self._setup_instruments.itervalues():
            # instruments that have been set up: instr_key: i_obj

            # Start a resource agent client to talk with the instrument agent.
            _ia_client = self._create_resource_agent_client(instrument.instrument_device_id)

            cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE)
            with self.assertRaises(Conflict):
                retval = _ia_client.execute_resource(cmd)

            cmd = AgentCommand(command=ResourceAgentEvent.RESET)
            retval = _ia_client.execute_agent(cmd)
            state = _ia_client.get_agent_state()
            self.assertEqual(state, ResourceAgentState.UNINITIALIZED)

        return


class TestPlatformInstrument(BaseIntTestPlatform):

    def setUp(self):
        self._start_container()

        self._pp = pprint.PrettyPrinter()

        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        #url = OmsTestMixin.start_http_server()
        #log.debug("TestPlatformInstrument:setup http url %s", url)
        #
        #result = self.oms.event.register_event_listener(url)
        #log.debug("TestPlatformInstrument:setup register_event_listener result %s", result)


        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.RR2  = EnhancedResourceRegistryClient(self.rrclient)

        self.org_id = self.RR2.create(any_old(RT.Org))
        log.debug("Org created: %s", self.org_id)

        # see _set_receive_timeout
        self._receive_timeout = 300

        self.instrument_device_id = ''
        self.platform_device_id = ''
        self.platform_site_id = ''
        self.platform_agent_instance_id = ''
        self._pa_client = ''

        def done():
            CIOMSClientFactory.destroy_instance(self.oms)
            event_notifications = OmsTestMixin.stop_http_server()
            log.info("event_notifications = %s" % str(event_notifications))

        self.addCleanup(done)


    @unittest.skip('Must be run locally...')
    def test_platform_with_instrument_streaming(self):
        #
        # The following is with just a single platform and the single
        # instrument "SBE37_SIM_08", which corresponds to the one on port 4008.
        #

        # load the parameters and the param dicts necessary for the VEL3D
        log.debug( "load params------------------------------------------------------------------------------")
        self._load_params()

        log.debug( " _register_oms_listener------------------------------------------------------------------------------")
        self._register_oms_listener()

        # create the instrument device/agent/model
        log.debug( "---------- create_instrument_resources ----------" )
        self._create_instrument_resources()

        #create the platform device, agent and instance
        log.debug( "---------- create_platform_configuration ----------" )
        self._create_platform_configuration('LPJBox_CI_Ben_Hall')

        self.rrclient.create_association(subject=self.platform_device_id, predicate=PRED.hasDevice, object=self.instrument_device_id)

        log.debug( "---------- start_platform ----------" )
        self._start_platform()
        self.addCleanup(self._stop_platform)

        # get everything in command mode:
        self._ping_agent()
        log.debug( " ---------- initialize ----------" )
        self._initialize()


        _ia_client = ResourceAgentClient(self.instrument_device_id, process=FakeProcess())
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)

        log.debug( " ---------- go_active ----------" )
        self._go_active()
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)

        log.debug( "---------- run ----------" )
        self._run()

        gevent.sleep(2)


        log.debug( " ---------- _start_resource_monitoring ----------" )
        self._start_resource_monitoring()
        gevent.sleep(2)
#
#        # verify the instrument is command state:
#        state = ia_client.get_agent_state()
#        log.debug(" TestPlatformInstrument get_agent_state: %s", state)
#        self.assertEqual(state, ResourceAgentState.COMMAND)

        log.debug( " ---------- _stop_resource_monitoring ----------" )
        self._stop_resource_monitoring()
        gevent.sleep(2)


        log.debug( " ---------- go_inactive ----------" )
        self._go_inactive()
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)



        self._reset()
        self._shutdown()


    def _get_platform_attributes(self):
        log.debug( " ----------get_platform_attributes ----------")
        attr_infos = self.oms.attr.get_platform_attributes('LPJBox_CI_Ben_Hall')
        log.debug('_get_platform_attributes: %s', self._pp.pformat(attr_infos))

        attrs = attr_infos['LPJBox_CI_Ben_Hall']
        for attrid, arrinfo in attrs.iteritems():
            arrinfo['attr_id'] = attrid

        log.debug('_get_platform_attributes: %s', self._pp.pformat(attrs))
        return attrs

    def _load_params(self):

        log.info(" ---------- load_params ----------")
        # load_parameter_scenarios
        self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
            op="load",
            scenario="BETA",
            path="master",
            categories="ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
            clearcols="owner_id,org_ids",
            assets="res/preload/r2_ioc/ooi_assets",
            parseooi="True",
        ))

    def _create_platform_configuration(self, platform_id, parent_platform_id=None):
        """
        This method is an adaptation of test_agent_instance_config in
        test_instrument_management_service_integration.py

        @param platform_id
        @param parent_platform_id
        @return (platform_site_id, platform_deployment_id)
        """

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        param_dict_name = 'platform_eng_parsed'
        parsed_rpdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            param_dict_name,
            id_only=True)
        self.parsed_stream_def_id = self.pubsubclient.create_stream_definition(
            name='parsed',
            parameter_dictionary_id=parsed_rpdict_id)


        driver_config = PLTFRM_DVR_CONFIG
        driver_config['attributes'] = self._get_platform_attributes()    #self._platform_attributes[platform_id]

        #OMS returning an error for port.get_platform_ports
        #driver_config['ports']      = self._platform_ports[platform_id]
        log.debug("driver_config: %s", driver_config)

        # instance creation
        platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {
            'driver_config': driver_config})

        platform_agent_instance_obj.agent_config = {
                'platform_config': { 'platform_id': 'LPJBox_CI_Ben_Hall', 'parent_platform_id':  None }
            }

        self.platform_agent_instance_id = self.imsclient.create_platform_agent_instance(platform_agent_instance_obj)

        # agent creation
        platform_agent_obj = any_old(RT.PlatformAgent, {
            "stream_configurations": self._get_platform_stream_configs(),
            'driver_module':         PLTFRM_DVR_MOD,
            'driver_class':          PLTFRM_DVR_CLS})
        platform_agent_id = self.imsclient.create_platform_agent(platform_agent_obj)

        # device creation
        self.platform_device_id = self.imsclient.create_platform_device(any_old(RT.PlatformDevice))

        # data product creation
        dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
        dp_id = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=self.parsed_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=self.platform_device_id, data_product_id=dp_id)
        self.dpclient.activate_data_product_persistence(data_product_id=dp_id)
        self.addCleanup(self.dpclient.delete_data_product, dp_id)

        # assignments
        self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(self.platform_agent_instance_id, self.platform_device_id)
        self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, self.platform_agent_instance_id)
        self.RR2.assign_platform_device_to_org_with_has_resource(self.platform_agent_instance_id, self.org_id)

        #######################################
        # dataset

        log.debug('data product = %s', dp_id)

        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
        log.debug('Data product stream_ids = %s', stream_ids)
        stream_id = stream_ids[0]

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset, RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        #######################################


        log.debug('_create_platform_site_and_deployment  platform_device_id: %s', self.platform_device_id)

        site_object = IonObject(RT.PlatformSite, name='PlatformSite1')
        self.platform_site_id = self.omsclient.create_platform_site(platform_site=site_object, parent_id='')
        log.debug('_create_platform_site_and_deployment  site id: %s', self.platform_site_id)

        #create supporting objects for the Deployment resource
        # 1. temporal constraint
        # find current deployment using time constraints
        current_time =  int( calendar.timegm(time.gmtime()) )
        # two years on either side of current time
        start = current_time - 63115200
        end = current_time + 63115200
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))
        # 2. PlatformPort object which defines device to port map
        platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-01-CTDMO0999',
                                                        port_type=PortTypeEnum.UPLINK,
                                                        ip_address='0')

        # now create the Deployment
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestPlatformDeployment',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds],
                                   port_assignments={self.platform_device_id:platform_port_obj})

        platform_deployment_id = self.omsclient.create_deployment(deployment=deployment_obj, site_id=self.platform_site_id, device_id=self.platform_device_id)
        log.debug('_create_platform_site_and_deployment  deployment_id: %s', platform_deployment_id)

        deploy_obj2 = self.omsclient.read_deployment(platform_deployment_id)
        log.debug('_create_platform_site_and_deployment  deploy_obj2 : %s', deploy_obj2)
        return self.platform_site_id, platform_deployment_id


    def _create_instrument_resources(self):
        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
            name='VEL3D',
            description="VEL3D")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)


        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='raw' )
        vel3d_b_sample = StreamConfiguration(stream_name='vel3d_b_sample', parameter_dictionary_name='vel3d_b_sample')
        vel3d_b_engineering = StreamConfiguration(stream_name='vel3d_b_engineering', parameter_dictionary_name='vel3d_b_engineering')


        # Create InstrumentAgent
        instAgent_obj = IonObject(RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri="http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg",
            stream_configurations = [raw_config, vel3d_b_sample, vel3d_b_engineering])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        instDevice_obj = IonObject(RT.InstrumentDevice,
            name='VEL3DDevice',
            description="VEL3DDevice",
            serial_number="12345" )
        self.instrument_device_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, self.instrument_device_id)

        port_agent_config = {
            'device_addr':  '10.180.80.6',
            'device_port':  2101,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': 1025,
            'data_port': 1026,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }
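        # Note: these are fixed test settings (instrument assumed at
        # 10.180.80.6:2101; port agent on localhost using command port 1025 and
        # data port 1026) and may need adjusting for the local test environment.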

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='VEL3DAgentInstance',
            description="VEL3DAgentInstance",
            port_agent_config = port_agent_config,
            alerts= [])


        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj,
            instAgent_id,
            self.instrument_device_id)
        self._start_port_agent(self.imsclient.read_instrument_agent_instance(instAgentInstance_id))

        vel3d_b_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('vel3d_b_sample', id_only=True)
        vel3d_b_sample_stream_def_id = self.pubsubclient.create_stream_definition(name='vel3d_b_sample', parameter_dictionary_id=vel3d_b_sample_pdict_id)

        vel3d_b_engineering_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('vel3d_b_engineering', id_only=True)
        vel3d_b_engineering_stream_def_id = self.pubsubclient.create_stream_definition(name='vel3d_b_engineering', parameter_dictionary_id=vel3d_b_engineering_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('raw', id_only=True)
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='raw', parameter_dictionary_id=raw_pdict_id)


        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='vel3d_b_sample',
            description='vel3d_b_sample',
            temporal_domain = tdom,
            spatial_domain = sdom)

        data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=vel3d_b_sample_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=self.instrument_device_id, data_product_id=data_product_id1)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)


        dp_obj = IonObject(RT.DataProduct,
            name='vel3d_b_engineering',
            description='vel3d_b_engineering',
            temporal_domain = tdom,
            spatial_domain = sdom)

        data_product_id2 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=vel3d_b_engineering_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=self.instrument_device_id, data_product_id=data_product_id2)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)


        dp_obj = IonObject(RT.DataProduct,
            name='the raw data',
            description='raw stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        data_product_id3 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=self.instrument_device_id, data_product_id=data_product_id3)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id3)

        #create instrument site and associated deployment
        site_object = IonObject(RT.InstrumentSite, name='InstrumentSite1')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site=site_object, parent_id=self.platform_site_id)
        log.debug('_create_instrument_site_and_deployment  site id: %s', instrument_site_id)


        #create supporting objects for the Deployment resource
        # 1. temporal constraint
        # find current deployment using time constraints
        current_time =  int( calendar.timegm(time.gmtime()) )
        # two years on either side of current time
        start = current_time - 63115200
        end = current_time + 63115200
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))
        # 2. PlatformPort object which defines device to port map
        platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-03-CTDMO0999',
                                                        port_type=PortTypeEnum.PAYLOAD,
                                                        ip_address='0')

        # now create the Deployment
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestInstrumentDeployment',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledInstrumentDeploymentContext),
                                   constraint_list=[temporal_bounds],
                                   port_assignments={self.instrument_device_id:platform_port_obj})

        instrument_deployment_id = self.omsclient.create_deployment(deployment=deployment_obj, site_id=instrument_site_id, device_id=self.instrument_device_id)
        log.debug('_create_instrument_site_and_deployment  deployment_id: %s', instrument_deployment_id)




    def _start_port_agent(self, instrument_agent_instance_obj=None):
        """
        Construct and start the port agent, ONLY NEEDED FOR INSTRUMENT AGENTS.
        """

        _port_agent_config = instrument_agent_instance_obj.port_agent_config

        # It blocks until the port agent starts up or a timeout
        _pagent = PortAgentProcess.launch_process(_port_agent_config,  test_mode = True)
        pid = _pagent.get_pid()
        port = _pagent.get_data_port()
        cmd_port = _pagent.get_command_port()
        log.info("IMS:_start_pagent returned from PortAgentProcess.launch_process pid: %s ", pid)

        # Hack to get ready for DEMO. Further thought needs to be put into
        # how we pass this config info around.
        host = 'localhost'

        driver_config = instrument_agent_instance_obj.driver_config
        comms_config = driver_config.get('comms_config')
        if comms_config:
            host = comms_config.get('addr')
        else:
            log.warn("No comms_config specified, using '%s'" % host)

        # Configure driver to use port agent port number.
        instrument_agent_instance_obj.driver_config['comms_config'] = {
            'addr' : host,
            'cmd_port' : cmd_port,
            'port' : port
        }
        instrument_agent_instance_obj.driver_config['pagent_pid'] = pid
        self.imsclient.update_instrument_agent_instance(instrument_agent_instance_obj)
        return self.imsclient.read_instrument_agent_instance(instrument_agent_instance_obj._id)


    def _start_platform(self):
        """
        Starts the platform, waiting for it to transition to the
        UNINITIALIZED state (note that the agent starts in the LAUNCHING state).

        More concretely, the sequence of steps is:
        - prepares subscriber to receive the UNINITIALIZED state transition
        - launches the platform process
        - waits for the start of the process
        - waits for the transition to the UNINITIALIZED state
        """
        ##############################################################
        # prepare to receive the UNINITIALIZED state transition:
        async_res = AsyncResult()

        def consume_event(evt, *args, **kwargs):
            log.debug("Got ResourceAgentStateEvent %s from origin %r", evt.state, evt.origin)
            if evt.state == PlatformAgentState.UNINITIALIZED:
                async_res.set(evt)

        # start subscriber:
        sub = EventSubscriber(event_type="ResourceAgentStateEvent",
                              origin=self.platform_device_id,
                              callback=consume_event)
        sub.start()
        log.info("registered event subscriber to wait for state=%r from origin %r",
                 PlatformAgentState.UNINITIALIZED, self.platform_device_id)
        #self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)
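        # the subscriber is confirmed ready before launching the platform so the
        # UNINITIALIZED state transition event cannot be missed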

        ##############################################################
        # now start the platform:
        agent_instance_id = self.platform_agent_instance_id
        log.debug("about to call start_platform_agent_instance with id=%s", agent_instance_id)
        pid = self.imsclient.start_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        #wait for start
        agent_instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     self.platform_device_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(90), "The platform agent instance did not spawn in 90 seconds")

        # Start a resource agent client to talk with the agent.
        self._pa_client = ResourceAgentClient(self.platform_device_id,
                                              name=gate.process_id,
                                              process=FakeProcess())
        log.debug("got platform agent client %s", str(self._pa_client))

        ##############################################################
        # wait for the UNINITIALIZED event:
        async_res.get(timeout=self._receive_timeout)

    def _register_oms_listener(self):

        # load the parameters and the param dicts necessary for the VEL3D
        log.debug( "---------- connect_to_oms ---------- ")
        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        #buddha url
        url = "http://10.22.88.168:5000/ion-service/oms_event"
        log.info("test_oms_events_receive:setup http url %s", url)

        result = self.oms.event.register_event_listener(url)
        log.debug("_register_oms_listener register_event_listener result %s", result)

        #-------------------------------------------------------------------------------------
        # Set up the subscriber to catch the alert event
        #-------------------------------------------------------------------------------------

        def callback_for_alert(event, *args, **kwargs):
            log.debug("caught an OMSDeviceStatusEvent: %s", event)
            self.catch_alert.put(event)

        self.event_subscriber = EventSubscriber(event_type='OMSDeviceStatusEvent',
            callback=callback_for_alert)

        self.event_subscriber.start()
        self.addCleanup(self.event_subscriber.stop)


        result = self.oms.event.generate_test_event({'platform_id': 'fake_platform_id', 'message': "fake event triggered from CI using OMS' generate_test_event", 'severity': '3', 'group ': 'power'})
        log.debug("_register_oms_listener generate_test_event result %s", result)


    def _stop_platform(self):
        try:
            self.imsclient.stop_platform_agent_instance(self.platform_agent_instance_id)
        except Exception:
            log.warn(
                "platform_id=%r: Exception in IMS.stop_platform_agent_instance with "
                "platform_agent_instance_id = %r. Perhaps already dead.",
                self.platform_device_id, self.platform_agent_instance_id)


class TestPlatformInstrument(BaseIntTestPlatform):

    def setUp(self):
        self._start_container()

        self._pp = pprint.PrettyPrinter()

        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        self._get_platform_attributes()

        url = OmsTestMixin.start_http_server()
        log.info("TestPlatformInstrument:setup http url %s", url)

        result = self.oms.event.register_event_listener(url)
        log.info("TestPlatformInstrument:setup register_event_listener result %s", result)

#        response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall')
#        log.info("TestPlatformInstrument:setup get_platform_ports %s", response)

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.RR2  = EnhancedResourceRegistryClient(self.rrclient)

        self.org_id = self.RR2.create(any_old(RT.Org))
        log.debug("Org created: %s", self.org_id)

        # see _set_receive_timeout
        self._receive_timeout = 177

        self.instrument_device = ''
        self.platform_device = ''
        self.platform_agent_instance_id = ''
        self._pa_client = ''

        def done():
            CIOMSClientFactory.destroy_instance(self.oms)
            event_notifications = OmsTestMixin.stop_http_server()
            log.info("event_notifications = %s" % str(event_notifications))

        self.addCleanup(done)

    def _get_platform_attributes(self):
        attr_infos = self.oms.attr.get_platform_attributes('LPJBox_CI_Ben_Hall')
        log.debug('_get_platform_attributes: %s', self._pp.pformat(attr_infos))

#        ret_infos = attr_infos['LPJBox_CI_Ben_Hall']
#        for attrName, attr_defn in ret_infos.iteritems():
#            attr = AttrNode(attrName, attr_defn)
#            pnode.add_attribute(attr)
        return attr_infos

    @unittest.skip('Still in construction...')
    def test_platform_with_instrument_streaming(self):
        #
        # The following is with just a single platform and the single
        # instrument "SBE37_SIM_08", which corresponds to the one on port 4008.
        #

        # load the parameters and the param dicts necessary for the VEL3D
        self._load_params()

        # create the instrument device/agent/model
        self._create_instrument_resources()

        #create the platform device, agent and instance
        self._create_platform_configuration('LPJBox_CI_Ben_Hall')

        self.rrclient.create_association(subject=self.platform_device, predicate=PRED.hasDevice, object=self.instrument_device)


        self._start_platform()
#        self.addCleanup(self._stop_platform, p_root)

        # get everything in command mode:
        self._ping_agent()
        self._initialize()


        _ia_client = ResourceAgentClient(self.instrument_device, process=FakeProcess())
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)


        self._go_active()
#        self._run()

        gevent.sleep(3)

        # note that this includes the instrument also getting to the command state

#        self._stream_instruments()

        # get client to the instrument:
        # the i_obj is a DotDict with various pieces captured during the
        # set-up of the instrument, in particular instrument_device_id
        #i_obj = self._get_instrument(instr_key)

#        log.debug("KK creating ResourceAgentClient")
#        ia_client = ResourceAgentClient(i_obj.instrument_device_id,
#                                        process=FakeProcess())
#        log.debug("KK got ResourceAgentClient: %s", ia_client)
#
#        # verify the instrument is command state:
#        state = ia_client.get_agent_state()
#        log.debug("KK instrument state: %s", state)
#        self.assertEqual(state, ResourceAgentState.COMMAND)



        self._reset()
        self._shutdown()




    def _load_params(self):

        log.info("--------------------------------------------------------------------------------------------------------")
        # load_parameter_scenarios
        self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
            op="load",
            scenario="BETA",
            path="master",
            categories="ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
            clearcols="owner_id,org_ids",
            assets="res/preload/r2_ioc/ooi_assets",
            parseooi="True",
        ))

    def _create_platform_configuration(self, platform_id, parent_platform_id=None):
        """
        This method is an adaptation of test_agent_instance_config in
        test_instrument_management_service_integration.py

        @param platform_id
        @param parent_platform_id
        @return None; the created resources are kept as attributes on self
                (e.g. platform_agent_instance_id, platform_device).
        """

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        param_dict_name = 'platform_eng_parsed'
        parsed_rpdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            param_dict_name,
            id_only=True)
        self.parsed_stream_def_id = self.pubsubclient.create_stream_definition(
            name='parsed',
            parameter_dictionary_id=parsed_rpdict_id)


        driver_config = PLTFRM_DVR_CONFIG
        driver_config['attributes'] = self._get_platform_attributes()    #self._platform_attributes[platform_id]
        #OMS returning an error for port.get_platform_ports
        #driver_config['ports']      = self._platform_ports[platform_id]
        log.debug("driver_config: %s", driver_config)

        # instance creation
        platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {
            'driver_config': driver_config})

        platform_agent_instance_obj.agent_config = {
                'platform_config': { 'platform_id': 'LPJBox_CI_Ben_Hall', 'parent_platform_id':  None }
            }

        self.platform_agent_instance_id = self.imsclient.create_platform_agent_instance(platform_agent_instance_obj)

        # agent creation
        platform_agent_obj = any_old(RT.PlatformAgent, {
            "stream_configurations": self._get_platform_stream_configs(),
            'driver_module':         PLTFRM_DVR_MOD,
            'driver_class':          PLTFRM_DVR_CLS})
        platform_agent_id = self.imsclient.create_platform_agent(platform_agent_obj)

        # device creation
        self.platform_device = self.imsclient.create_platform_device(any_old(RT.PlatformDevice))

        # data product creation
        dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
        dp_id = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=self.parsed_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=self.platform_device, data_product_id=dp_id)
        self.dpclient.activate_data_product_persistence(data_product_id=dp_id)
        self.addCleanup(self.dpclient.delete_data_product, dp_id)

        # assignments
        self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(self.platform_agent_instance_id, self.platform_device)
        self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, self.platform_agent_instance_id)
        self.RR2.assign_platform_device_to_org_with_has_resource(self.platform_agent_instance_id, self.org_id)

        #######################################
        # dataset

        log.debug('data product = %s', dp_id)

        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
        log.debug('Data product stream_ids = %s', stream_ids)
        stream_id = stream_ids[0]

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset, RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        #######################################


        return

    def _create_instrument_resources(self):
        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
            name='VEL3D',
            description="VEL3D")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)


        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='raw' )
        vel3d_b_sample = StreamConfiguration(stream_name='vel3d_b_sample', parameter_dictionary_name='vel3d_b_sample')
        vel3d_b_engineering = StreamConfiguration(stream_name='vel3d_b_engineering', parameter_dictionary_name='vel3d_b_engineering')


        # Create InstrumentAgent
        instAgent_obj = IonObject(RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri="http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg",
            stream_configurations = [raw_config, vel3d_b_sample, vel3d_b_engineering])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        instDevice_obj = IonObject(RT.InstrumentDevice,
            name='VEL3DDevice',
            description="VEL3DDevice",
            serial_number="12345" )
        self.instrument_device = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, self.instrument_device)

        port_agent_config = {
            'device_addr':  '10.180.80.6',
            'device_port':  2101,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': 1025,
            'data_port': 1026,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='VEL3DAgentInstance',
            description="VEL3DAgentInstance",
            port_agent_config = port_agent_config,
            alerts= [])


        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj,
            instAgent_id,
            self.instrument_device)
        self._start_port_agent(self.imsclient.read_instrument_agent_instance(instAgentInstance_id))

        vel3d_b_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('vel3d_b_sample', id_only=True)
        vel3d_b_sample_stream_def_id = self.pubsubclient.create_stream_definition(name='vel3d_b_sample', parameter_dictionary_id=vel3d_b_sample_pdict_id)

        vel3d_b_engineering_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('vel3d_b_engineering', id_only=True)
        vel3d_b_engineering_stream_def_id = self.pubsubclient.create_stream_definition(name='vel3d_b_engineering', parameter_dictionary_id=vel3d_b_engineering_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('raw', id_only=True)
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='raw', parameter_dictionary_id=raw_pdict_id)


        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='vel3d_b_sample',
            description='vel3d_b_sample',
            temporal_domain = tdom,
            spatial_domain = sdom)

        data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=vel3d_b_sample_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=self.instrument_device, data_product_id=data_product_id1)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)


        dp_obj = IonObject(RT.DataProduct,
            name='vel3d_b_engineering',
            description='vel3d_b_engineering',
            temporal_domain = tdom,
            spatial_domain = sdom)

        data_product_id2 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=vel3d_b_engineering_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=self.instrument_device, data_product_id=data_product_id2)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)


        dp_obj = IonObject(RT.DataProduct,
            name='the raw data',
            description='raw stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        data_product_id3 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=self.instrument_device, data_product_id=data_product_id3)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id3)





    def _start_port_agent(self, instrument_agent_instance_obj=None):
        """
        Construct and start the port agent, ONLY NEEDED FOR INSTRUMENT AGENTS.
        """

        _port_agent_config = instrument_agent_instance_obj.port_agent_config

        # It blocks until the port agent starts up or a timeout
        _pagent = PortAgentProcess.launch_process(_port_agent_config,  test_mode = True)
        pid = _pagent.get_pid()
        port = _pagent.get_data_port()
        cmd_port = _pagent.get_command_port()
        log.info("IMS:_start_pagent returned from PortAgentProcess.launch_process pid: %s ", pid)

        # Hack to get ready for DEMO. Further thought needs to be put into
        # how we pass this config info around.
        host = 'localhost'

        driver_config = instrument_agent_instance_obj.driver_config
        comms_config = driver_config.get('comms_config')
        if comms_config:
            host = comms_config.get('addr')
        else:
            log.warn("No comms_config specified, using '%s'" % host)

        # Configure driver to use port agent port number.
        instrument_agent_instance_obj.driver_config['comms_config'] = {
            'addr' : host,
            'cmd_port' : cmd_port,
            'port' : port
        }
        instrument_agent_instance_obj.driver_config['pagent_pid'] = pid
        self.imsclient.update_instrument_agent_instance(instrument_agent_instance_obj)
        return self.imsclient.read_instrument_agent_instance(instrument_agent_instance_obj._id)

    def _start_platform(self):
        """
        Starts the platform, waiting for it to transition to the
        UNINITIALIZED state (note that the agent starts in the LAUNCHING state).

        More concretely, the sequence of steps is:
        - prepares subscriber to receive the UNINITIALIZED state transition
        - launches the platform process
        - waits for the start of the process
        - waits for the transition to the UNINITIALIZED state
        """
        ##############################################################
        # prepare to receive the UNINITIALIZED state transition:
        async_res = AsyncResult()

        def consume_event(evt, *args, **kwargs):
            log.debug("Got ResourceAgentStateEvent %s from origin %r", evt.state, evt.origin)
            if evt.state == PlatformAgentState.UNINITIALIZED:
                async_res.set(evt)

        # start subscriber:
        sub = EventSubscriber(event_type="ResourceAgentStateEvent",
                              origin=self.platform_device,
                              callback=consume_event)
        sub.start()
        log.info("registered event subscriber to wait for state=%r from origin %r",
                 PlatformAgentState.UNINITIALIZED, self.platform_device)
        #self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)

        ##############################################################
        # now start the platform:
        agent_instance_id = self.platform_agent_instance_id
        log.debug("about to call start_platform_agent_instance with id=%s", agent_instance_id)
        pid = self.imsclient.start_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        #wait for start
        agent_instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     self.platform_device._id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(90), "The platform agent instance did not spawn in 90 seconds")

        # Start a resource agent client to talk with the agent.
        self._pa_client = ResourceAgentClient(self.platform_device,
                                              name=gate.process_id,
                                              process=FakeProcess())
        log.debug("got platform agent client %s", str(self._pa_client))

        ##############################################################
        # wait for the UNINITIALIZED event:
        async_res.get(timeout=self._receive_timeout)


class TestAgentLaunchOps(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'
        unittest  # suppress a PyCharm inspector error if all unittest.skip references are commented out

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS = IdentityManagementServiceClient(node=self.container.node)
        self.PSC = PubsubManagementServiceClient(node=self.container.node)
        self.DP = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.DSC = DatasetManagementServiceClient(node=self.container.node)
        self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return

    def test_get_agent_client_noprocess(self):
        inst_device_id = self.RR2.create(any_old(RT.InstrumentDevice))

        iap = ResourceAgentClient._get_agent_process_id(inst_device_id)

        # should be no running agent
        self.assertIsNone(iap)

        # should raise NotFound
        self.assertRaises(NotFound, ResourceAgentClient, inst_device_id)

    def test_resource_state_save_restore(self):

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        raw_config = StreamConfiguration(
            stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict')
        parsed_config = StreamConfiguration(
            stream_name='parsed',
            parameter_dictionary_name='ctd_parsed_param_dict')
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri=DRV_URI_GOOD,
            stream_configurations=[raw_config, parsed_config])
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            'test_activateInstrumentSample: Create instrument resource to represent the SBE37 '
            + '(SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        instDevice_id = self.IMS.create_instrument_device(
            instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(
            instModel_id, instDevice_id)

        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
            instDevice_id)

        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }
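        # device_addr/device_port point at the (simulated) SBE37 instrument; command_port
        # and data_port are the ports the UNIX port agent process itself listens on.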

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config)

        instAgentInstance_id = self.IMS.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        spdict_id = self.DSC.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(
            name='parsed', parameter_dictionary_id=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name(
            'ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(
            name='raw', parameter_dictionary_id=rpdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test')

        data_product_id1 = self.DP.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)

        log.debug('new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id,
                                      data_product_id=data_product_id1)
        self.DP.activate_data_product_persistence(
            data_product_id=data_product_id1)
        self.addCleanup(self.DP.suspend_data_product_persistence,
                        data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream,
                                             None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1,
                                              PRED.hasDataset, RT.Dataset,
                                              True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]
        #create the datastore at the beginning of each int test that persists data

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test')

        data_product_id2 = self.DP.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id,
                                      data_product_id=data_product_id2)

        self.DP.activate_data_product_persistence(
            data_product_id=data_product_id2)
        self.addCleanup(self.DP.suspend_data_product_persistence,
                        data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)

        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(
            instAgentInstance_id)
        gate = AgentProcessStateGate(self.PDC.read_process, instDevice_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(30),
            "The instrument agent instance (%s) did not spawn in 30 seconds" %
            gate.process_id)

        # take snapshot of config
        snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)
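        # save_resource_state returns the id of an attachment holding the snapshot,
        # which is why it is fetched via read_attachment with its content included.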

        # modify config: deliberately corrupt comms_config so the restore below can be verified
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.restore_resource_state(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)

        if "BAD_DATA" == instance_obj.driver_config["comms_config"]:
            print "Saved config:"
            print snap_obj.content
            self.fail("Saved config was not properly restored")

        self.assertNotEqual("BAD_DATA",
                            instance_obj.driver_config["comms_config"])

        self.DP.delete_data_product(data_product_id1)
        self.DP.delete_data_product(data_product_id2)

    def test_agent_instance_config_hasDevice(self):
        def assign_fn(child_device_id, parent_device_id):
            self.RR2.create_association(parent_device_id, PRED.hasDevice,
                                        child_device_id)

        def find_fn(parent_device_id):
            ret, _ = self.RR.find_objects(subject=parent_device_id,
                                          predicate=PRED.hasDevice,
                                          id_only=True)
            return ret

        self.base_agent_instance_config(assign_fn, find_fn)
        log.info("END test_agent_instance_config_hasDevice")

    def test_agent_instance_config_hasNetworkParent(self):
        def assign_fn(child_device_id, parent_device_id):
            self.RR2.create_association(child_device_id, PRED.hasNetworkParent,
                                        parent_device_id)

        def find_fn(parent_device_id):
            ret, _ = self.RR.find_subjects(object=parent_device_id,
                                           predicate=PRED.hasNetworkParent,
                                           id_only=True)
            return ret

        self.base_agent_instance_config(assign_fn, find_fn)
        log.info("END test_agent_instance_config_hasNetworkParent")

    def base_agent_instance_config(
            self, assign_child_platform_to_parent_platform_fn,
            find_child_platform_ids_of_parent_platform_fn):
        """
        Verify that agent configurations are being built properly
        """
        clients = DotDict()
        clients.resource_registry = self.RR
        clients.pubsub_management = self.PSC
        clients.dataset_management = self.DSC
        # holder for the platform (.p) and instrument (.i) configuration builders,
        # (re)created by the refresh hacks below
        config_builder = DotDict()
        config_builder.i = None
        config_builder.p = None

        def refresh_pconfig_builder_hack(config_builder):
            """
            ugly hack to get around "idempotent" RR2 caching
            remove after https://github.com/ooici/coi-services/pull/1190
            """
            config_builder.p = PlatformAgentConfigurationBuilder(clients)

        def refresh_iconfig_builder_hack(config_builder):
            """
            ugly hack to get around "idempotent" RR2 caching
            remove after https://github.com/ooici/coi-services/pull/1190
            """
            config_builder.i = InstrumentAgentConfigurationBuilder(clients)

        org_obj = any_old(RT.Org)
        org_id = self.RR2.create(org_obj)

        inst_startup_config = {'startup': 'config'}

        generic_alerts_config = [{'lvl2': 'lvl3val'}]

        required_config_keys = [
            'org_governance_name', 'device_type', 'agent', 'driver_config',
            'stream_config', 'startup_config', 'aparam_alerts_config',
            'children'
        ]
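        # For reference, a prepared agent config is expected to look roughly like the
        # following (a sketch based on the verify_* helpers below, not an exhaustive spec):
        #   {
        #       'org_governance_name': <org governance name>,
        #       'device_type': RT.PlatformDevice or RT.InstrumentDevice,
        #       'agent': {'resource_id': <device id>, ...},
        #       'driver_config': {'process_type': ('ZMQPyClassDriverLauncher',), ...},
        #       'stream_config': {...},
        #       'startup_config': {...},
        #       'aparam_alerts_config': <alerts list>,
        #       'children': {<child device id>: <child config>, ...},
        #   }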

        def verify_instrument_config(config, device_id):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name,
                             config['org_governance_name'])
            self.assertEqual(RT.InstrumentDevice, config['device_type'])
            self.assertIn('driver_config', config)
            driver_config = config['driver_config']
            expected_driver_fields = {
                'process_type': ('ZMQPyClassDriverLauncher', ),
            }
            for k, v in expected_driver_fields.iteritems():
                self.assertIn(k, driver_config)
                self.assertEqual(v, driver_config[k])

            self.assertIn('resource_id', config['agent'])
            self.assertEqual(device_id, config['agent']['resource_id'])
            self.assertEqual(inst_startup_config, config['startup_config'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config,
                             config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            for key in ['children']:
                self.assertEqual({}, config[key])

        # TODO(OOIION-1495) review the asserts below related to
        # requiring 'ports' to be present in the driver_config.
        # See recent adjustment in agent_configuration_builder.py,
        # which I made so that other tests do not fail.
        # The asserts below would make the following tests fail:
        #  test_agent_instance_config_hasDevice
        #  test_agent_instance_config_hasNetworkParent

        def verify_child_config(config, device_id, inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name,
                             config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertIn('resource_id', config['agent'])
            self.assertEqual(device_id, config['agent']['resource_id'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config,
                             config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            self.assertIn('driver_config', config)
            self.assertIn('foo', config['driver_config'])
            """
            self.assertIn('ports', config['driver_config'])
            """
            self.assertEqual('bar', config['driver_config']['foo'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher', ),
                             config['driver_config']['process_type'])

            if None is inst_device_id:
                for key in ['children', 'startup_config']:
                    self.assertEqual({}, config[key])
            else:
                for key in ['startup_config']:
                    self.assertEqual({}, config[key])

                self.assertIn(inst_device_id, config['children'])
                verify_instrument_config(config['children'][inst_device_id],
                                         inst_device_id)
            """
            if config['driver_config']['ports']:
                self.assertTrue( isinstance(config['driver_config']['ports'], dict) )
            """

        def verify_parent_config(config,
                                 parent_device_id,
                                 child_device_id,
                                 inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name,
                             config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertIn('process_type', config['driver_config'])
            """
            self.assertIn('ports', config['driver_config'])
            """
            self.assertEqual(('ZMQPyClassDriverLauncher', ),
                             config['driver_config']['process_type'])
            self.assertIn('resource_id', config['agent'])
            self.assertEqual(parent_device_id, config['agent']['resource_id'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config,
                             config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            for key in ['startup_config']:
                self.assertEqual({}, config[key])
            """
            if config['driver_config']['ports']:
                self.assertTrue( isinstance(config['driver_config']['ports'], dict) )
            """
            self.assertIn(child_device_id, config['children'])
            verify_child_config(config['children'][child_device_id],
                                child_device_id, inst_device_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name(
            'ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(
            name='raw', parameter_dictionary_id=rpdict_id)

        #todo: create org and figure out which agent resource needs to get assigned to it

        def _make_platform_agent_structure(name='', agent_config=None):
            if None is agent_config: agent_config = {}

            # instance creation
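            # 'foo': 'bar' is a dummy driver_config entry; verify_child_config checks
            # that it makes it into the prepared configuration unchanged.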
            platform_agent_instance_obj = any_old(
                RT.PlatformAgentInstance, {
                    'driver_config': {
                        'foo': 'bar'
                    },
                    'alerts': generic_alerts_config
                })
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(
                platform_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(
                stream_name='raw',
                parameter_dictionary_name='ctd_raw_param_dict')
            platform_agent_obj = any_old(
                RT.PlatformAgent, {"stream_configurations": [raw_config]})
            platform_agent_id = self.IMS.create_platform_agent(
                platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(
                any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct)
            dp_id = self.DP.create_data_product(
                data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id,
                                          data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)
            self.addCleanup(self.DP.suspend_data_product_persistence, dp_id)

            #deployment creation
            site_obj = IonObject(RT.PlatformSite, name='sitePlatform')
            site_id = self.OMS.create_platform_site(platform_site=site_obj)

            # find current deployment using time constraints
            current_time = int(calendar.timegm(time.gmtime()))
            # two years on either side of current time
            start = current_time - 63115200
            end = current_time + 63115200
            temporal_bounds = IonObject(OT.TemporalBounds,
                                        name='planned',
                                        start_datetime=str(start),
                                        end_datetime=str(end))
            platform_port_obj = IonObject(
                OT.PlatformPort,
                reference_designator='GA01SUMO-FI003-09-CTDMO0999',
                port_type=PortTypeEnum.UPLINK,
                ip_address=0)
            deployment_obj = IonObject(
                RT.Deployment,
                name='TestPlatformDeployment_' + name,
                description='some new deployment',
                context=IonObject(OT.CabledNodeDeploymentContext),
                constraint_list=[temporal_bounds],
                port_assignments={platform_device_id: platform_port_obj})

            deploy_id = self.OMS.create_deployment(
                deployment=deployment_obj,
                site_id=site_id,
                device_id=platform_device_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(
                platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(
                platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(
                platform_agent_instance_id, org_id)

            return platform_agent_instance_id, platform_agent_id, platform_device_id

        def _make_instrument_agent_structure(agent_config=None):
            if None is agent_config: agent_config = {}

            # instance creation
            instrument_agent_instance_obj = any_old(
                RT.InstrumentAgentInstance, {
                    "startup_config": inst_startup_config,
                    'alerts': generic_alerts_config
                })
            instrument_agent_instance_obj.agent_config = agent_config
            instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(
                instrument_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(
                stream_name='raw',
                parameter_dictionary_name='ctd_raw_param_dict')
            instrument_agent_obj = any_old(
                RT.InstrumentAgent, {"stream_configurations": [raw_config]})
            instrument_agent_id = self.IMS.create_instrument_agent(
                instrument_agent_obj)

            # device creation
            instrument_device_id = self.IMS.create_instrument_device(
                any_old(RT.InstrumentDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct)
            dp_id = self.DP.create_data_product(
                data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(
                input_resource_id=instrument_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)
            self.addCleanup(self.DP.suspend_data_product_persistence, dp_id)

            #deployment creation
            site_obj = IonObject(RT.InstrumentSite, name='siteInstrument')
            site_id = self.OMS.create_instrument_site(instrument_site=site_obj)

            # find current deployment using time constraints
            current_time = int(calendar.timegm(time.gmtime()))
            # two years on either side of current time
            start = current_time - 63115200
            end = current_time + 63115200
            temporal_bounds = IonObject(OT.TemporalBounds,
                                        name='planned',
                                        start_datetime=str(start),
                                        end_datetime=str(end))
            platform_port_obj = IonObject(
                OT.PlatformPort,
                reference_designator='GA01SUMO-FI003-08-CTDMO0888',
                port_type=PortTypeEnum.PAYLOAD,
                ip_address=0)
            deployment_obj = IonObject(
                RT.Deployment,
                name='TestDeployment for Cabled Instrument',
                description='some new deployment',
                context=IonObject(OT.CabledInstrumentDeploymentContext),
                constraint_list=[temporal_bounds],
                port_assignments={instrument_device_id: platform_port_obj})

            deploy_id = self.OMS.create_deployment(
                deployment=deployment_obj,
                site_id=site_id,
                device_id=instrument_device_id)

            # assignments
            self.RR2.assign_instrument_agent_instance_to_instrument_device_with_has_agent_instance(
                instrument_agent_instance_id, instrument_device_id)
            self.RR2.assign_instrument_agent_to_instrument_agent_instance_with_has_agent_definition(
                instrument_agent_id, instrument_agent_instance_id)
            self.RR2.assign_instrument_device_to_org_with_has_resource(
                instrument_agent_instance_id, org_id)

            return instrument_agent_instance_id, instrument_agent_id, instrument_device_id

        # can't do anything without an agent instance obj
        log.debug(
            "Testing that preparing a launcher without agent instance raises an error"
        )
        refresh_pconfig_builder_hack(
            config_builder
        )  # associations have changed since builder was instantiated
        self.assertRaises(AssertionError,
                          config_builder.p.prepare,
                          will_launch=False)

        log.debug(
            "Making the structure for a platform agent, which will be the child"
        )
        platform_agent_instance_child_id, _, platform_device_child_id = _make_platform_agent_structure(
            name='child')
        platform_agent_instance_child_obj = self.RR2.read(
            platform_agent_instance_child_id)

        log.debug("Preparing a valid agent instance launch, for config only")
        refresh_pconfig_builder_hack(
            config_builder
        )  # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(
            platform_agent_instance_child_obj)
        child_config = config_builder.p.prepare(will_launch=False)
        verify_child_config(child_config, platform_device_child_id)

        log.debug(
            "Making the structure for a platform agent, which will be the parent"
        )
        platform_agent_instance_parent_id, _, platform_device_parent_id = _make_platform_agent_structure(
            name='parent')
        platform_agent_instance_parent_obj = self.RR2.read(
            platform_agent_instance_parent_id)

        log.debug("Testing child-less parent as a child config")
        refresh_pconfig_builder_hack(
            config_builder
        )  # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(
            platform_agent_instance_parent_obj)
        parent_config = config_builder.p.prepare(will_launch=False)
        verify_child_config(parent_config, platform_device_parent_id)

        log.debug("assigning child platform to parent")
        assign_child_platform_to_parent_platform_fn(platform_device_child_id,
                                                    platform_device_parent_id)

        child_device_ids = find_child_platform_ids_of_parent_platform_fn(
            platform_device_parent_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing parent + child as parent config")
        refresh_pconfig_builder_hack(
            config_builder
        )  # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(
            platform_agent_instance_parent_obj)
        parent_config = config_builder.p.prepare(will_launch=False)
        verify_parent_config(parent_config, platform_device_parent_id,
                             platform_device_child_id)

        log.debug("making the structure for an instrument agent")
        instrument_agent_instance_id, _, instrument_device_id = \
            _make_instrument_agent_structure()
        instrument_agent_instance_obj = self.RR2.read(
            instrument_agent_instance_id)

        log.debug("Testing instrument config")
        refresh_iconfig_builder_hack(
            config_builder
        )  # associations have changed since builder was instantiated
        config_builder.i.set_agent_instance_object(
            instrument_agent_instance_obj)
        instrument_config = config_builder.i.prepare(will_launch=False)
        verify_instrument_config(instrument_config, instrument_device_id)

        log.debug("assigning instrument to platform")
        self.RR2.assign_instrument_device_to_platform_device_with_has_device(
            instrument_device_id, platform_device_child_id)

        child_device_ids = self.RR2.find_instrument_device_ids_of_platform_device_using_has_device(
            platform_device_child_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing entire config")
        refresh_pconfig_builder_hack(
            config_builder
        )  # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(
            platform_agent_instance_parent_obj)
        full_config = config_builder.p.prepare(will_launch=False)
        verify_parent_config(full_config, platform_device_parent_id,
                             platform_device_child_id, instrument_device_id)

        #self.fail(parent_config)
        #plauncher.prepare(will_launch=False)
        log.info("END base_agent_instance_config")


class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS  = IdentityManagementServiceClient(node=self.container.node)
        self.PSC  = PubsubManagementServiceClient(node=self.container.node)
        self.DP   = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DSC  = DatasetManagementServiceClient(node=self.container.node)
        self.PDC  = ProcessDispatcherServiceClient(node=self.container.node)

        self.RR2 = EnhancedResourceRegistryClient(self.RR)

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return

    @attr('EXT')
    def test_resources_associations_extensions(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """
        
        #stuff we control
        instrument_agent_instance_id, _ =  self.RR.create(any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ =           self.RR.create(any_old(RT.InstrumentAgent))
        instrument_model_id, _ =           self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ =          self.RR.create(any_old(RT.InstrumentDevice))
        platform_agent_instance_id, _ =    self.RR.create(any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ =             self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ =            self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ =             self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ =              self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ =               self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        data_producer_id, _      = self.RR.create(any_old(RT.DataProducer))
        org_id, _ =                self.RR.create(any_old(RT.Org))

        #instrument_agent_instance_id #is only a target
        
        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id)
        self.RR.create_association(org_id, PRED.hasResource, instrument_device_id)


        # instrument_model_id is only a target

        # platform_agent_instance_id is only a target
        
        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        # platform_model_id is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id)

        # sensor_model_id is only a target

        #create a parsed product for this instrument output
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            processing_level_code='Parsed_Canonical',
            temporal_domain = tdom,
            spatial_domain = sdom)
        pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1)


        def addInstOwner(inst_id, subject):

            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            user_info_id = self.IDS.create_user_info(user_id, user_info_obj)

            self.RR.create_association(inst_id, PRED.hasOwner, user_id)


        #Testing multiple instrument owners
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

        extended_instrument = self.IMS.get_instrument_device_extension(instrument_device_id)

        self.assertEqual(instrument_device_id, extended_instrument._id)
        self.assertEqual(len(extended_instrument.owners), 2)
        self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id)

        # Lifecycle
        self.assertEquals(len(extended_instrument.lcstate_transitions), 5)
        self.assertEquals(set(extended_instrument.lcstate_transitions.keys()), set(['develop', 'deploy', 'retire', 'plan', 'integrate']))
        self.assertEquals(len(extended_instrument.availability_transitions), 2)
        self.assertEquals(set(extended_instrument.availability_transitions.keys()), set(['enable', 'announce']))

        # Verify that computed attributes exist for the extended instrument
        self.assertIsInstance(extended_instrument.computed.firmware_version, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.last_data_received_datetime, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.last_calibration_datetime, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue)

        self.assertIsInstance(extended_instrument.computed.power_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.communications_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.location_status_roll_up, ComputedIntValue)

        log.debug("extended_instrument.computed: %s", extended_instrument.computed)

        #check model
        inst_model_obj = self.RR.read(instrument_model_id)
        self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name)

        #check agent instance
        inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id)
        self.assertEqual(inst_agent_instance_obj.name, extended_instrument.agent_instance.name)

        #check agent
        inst_agent_obj = self.RR.read(instrument_agent_id)
        # compound assocs return a list of lists, so check the first element
        self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent.name)

        #check platform device
        plat_device_obj = self.RR.read(platform_device_id)
        self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name)

        extended_platform = self.IMS.get_platform_device_extension(platform_device_id)

        self.assertEqual(1, len(extended_platform.instrument_devices))
        self.assertEqual(instrument_device_id, extended_platform.instrument_devices[0]._id)
        self.assertEqual(1, len(extended_platform.instrument_models))
        self.assertEqual(instrument_model_id, extended_platform.instrument_models[0]._id)
        self.assertEquals(extended_platform.platform_agent._id, platform_agent_id)

        self.assertEquals(len(extended_platform.lcstate_transitions), 5)
        self.assertEquals(set(extended_platform.lcstate_transitions.keys()), set(['develop', 'deploy', 'retire', 'plan', 'integrate']))
        self.assertEquals(len(extended_platform.availability_transitions), 2)
        self.assertEquals(set(extended_platform.availability_transitions.keys()), set(['enable', 'announce']))

        #check sensor devices
        self.assertEqual(1, len(extended_instrument.sensor_devices))

        #check data_product_parameters_set
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.data_product_parameters_set.status)
        self.assertTrue( 'Parsed_Canonical' in extended_instrument.computed.data_product_parameters_set.value)
        # the ctd parameters should include 'temp'
        self.assertTrue( 'temp' in extended_instrument.computed.data_product_parameters_set.value['Parsed_Canonical'])

        #none of these will work because there is no agent
        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
                         extended_instrument.computed.firmware_version.status)
#        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
#                         extended_instrument.computed.operational_state.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.power_status_roll_up.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.communications_status_roll_up.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.data_status_roll_up.status)
#        self.assertEqual(StatusType.STATUS_OK,
#                        extended_instrument.computed.data_status_roll_up.value)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.location_status_roll_up.status)

#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.recent_events.status)
#        self.assertEqual([], extended_instrument.computed.recent_events.value)


        # cleanup
        c = DotDict()
        c.resource_registry = self.RR
        self.RR2.pluck(instrument_agent_id)
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(instrument_device_id)
        self.RR2.pluck(platform_agent_id)
        self.RR2.pluck(sensor_device_id)
        self.IMS.force_delete_instrument_agent(instrument_agent_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id)
        self.IMS.force_delete_platform_agent(platform_agent_id)
        self.IMS.force_delete_platform_device(platform_device_id)
        self.IMS.force_delete_platform_model(platform_model_id)
        self.IMS.force_delete_sensor_device(sensor_device_id)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #stuff we associate to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)



    def test_custom_attributes(self):
        """
        Test assignment of custom attributes
        """

        instrument_model_id, _ =           self.RR.create(any_old(RT.InstrumentModel,
                {"custom_attributes":
                         {"favorite_color": "attr desc goes here"}
            }))
        instrument_device_id, _ =          self.RR.create(any_old(RT.InstrumentDevice,
                {"custom_attributes":
                         {"favorite_color": "red",
                          "bogus_attr": "should raise warning"
                     }
            }))
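        # 'bogus_attr' is not declared in the model's custom_attributes, so (presumably)
        # it should only trigger a warning rather than an error.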

        self.IMS.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)

        # cleanup
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)






    def _get_datastore(self, dataset_id):
        dataset = self.DSC.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore




    def test_resource_state_save_restore(self):

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict', records_per_granule=2, granule_publish_rate=5 )
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg",
                                  stream_configurations = [raw_config, parsed_config] )
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug( 'new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 '
        + '(SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345" )
        instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
                  instDevice_id)

        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)


        instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                               instAgent_id,
                                                                               instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)


        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id1 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=parsed_stream_def_id)
                                                       
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)



        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug( 'Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]
        #create the datastore at the beginning of each int test that persists data



        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id2 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=raw_stream_def_id)
        log.debug( 'new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)


        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
        gate = ProcessStateGate(self.PDC.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" %
                                        instance_obj.agent_process_id)


        # take snapshot of config
        snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)
        print "Saved config:"
        print snap_obj.content

        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.restore_resource_state(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)
        self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])

        
        self.DP.delete_data_product(data_product_id1)
        self.DP.delete_data_product(data_product_id2)


    def test_data_producer(self):
        idevice_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice))
        self.assertEqual(1, len(self.RR2.find_data_producer_ids_of_instrument_device_using_has_data_producer(idevice_id)))

        pdevice_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice))
        self.assertEqual(1, len(self.RR2.find_data_producer_ids_of_platform_device_using_has_data_producer(pdevice_id)))


    def test_agent_instance_config(self):
        """
        Verify that agent configurations are being built properly
        """
        clients = DotDict()
        clients.resource_registry  = self.RR
        clients.pubsub_management  = self.PSC
        clients.dataset_management = self.DSC
        pconfig_builder = PlatformAgentConfigurationBuilder(clients)
        iconfig_builder = InstrumentAgentConfigurationBuilder(clients)


        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        org_obj = any_old(RT.Org)
        org_id = self.RR2.create(org_obj)

        inst_startup_config = {'startup': 'config'}

        generic_alerts_config = {'lvl1': {'lvl2': 'lvl3val'}}

        required_config_keys = [
            'org_governance_name',
            'device_type',
            'agent',
            'driver_config',
            'stream_config',
            'startup_config',
            'aparam_alert_config',
            'children']



        def verify_instrument_config(config, device_id):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name, config['org_governance_name'])
            self.assertEqual(RT.InstrumentDevice, config['device_type'])
            self.assertIn('driver_config', config)
            driver_config = config['driver_config']
            expected_driver_fields = {'process_type': ('ZMQPyClassDriverLauncher',),
                                      }
            for k, v in expected_driver_fields.iteritems():
                self.assertIn(k, driver_config)
                self.assertEqual(v, driver_config[k])

            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertEqual(inst_startup_config, config['startup_config'])
            self.assertIn('aparam_alert_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alert_config'])
            self.assertIn('stream_config', config)
            for key in ['children']:
                self.assertEqual({}, config[key])


        def verify_child_config(config, device_id, inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name, config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertIn('aparam_alert_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alert_config'])
            self.assertIn('stream_config', config)
            self.assertIn('driver_config', config)
            self.assertIn('foo', config['driver_config'])
            self.assertEqual('bar', config['driver_config']['foo'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])

            if None is inst_device_id:
                for key in ['children', 'startup_config']:
                    self.assertEqual({}, config[key])
            else:
                for key in ['startup_config']:
                    self.assertEqual({}, config[key])

                self.assertIn(inst_device_id, config['children'])
                verify_instrument_config(config['children'][inst_device_id], inst_device_id)


        def verify_parent_config(config, parent_device_id, child_device_id, inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name, config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])
            self.assertEqual({'resource_id': parent_device_id}, config['agent'])
            self.assertIn('aparam_alert_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alert_config'])
            self.assertIn('stream_config', config)
            for key in ['startup_config']:
                self.assertEqual({}, config[key])

            self.assertIn(child_device_id, config['children'])
            verify_child_config(config['children'][child_device_id], child_device_id, inst_device_id)






        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)
        #todo: create org and figure out which agent resource needs to get assigned to it


        def _make_platform_agent_structure(agent_config=None):
            if None is agent_config: agent_config = {}

            # instance creation
            platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {'driver_config': {'foo': 'bar'},
                                                                             'alerts': generic_alerts_config})
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
            platform_agent_obj = any_old(RT.PlatformAgent, {"stream_configurations":[raw_config]})
            platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, org_id)

            return platform_agent_instance_id, platform_agent_id, platform_device_id


        def _make_instrument_agent_structure(agent_config=None):
            if agent_config is None:
                agent_config = {}

            # instance creation
            instrument_agent_instance_obj = any_old(RT.InstrumentAgentInstance, {"startup_config": inst_startup_config,
                                                                                 'alerts': generic_alerts_config})
            instrument_agent_instance_obj.agent_config = agent_config
            instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw',
                                             parameter_dictionary_name='ctd_raw_param_dict',
                                             records_per_granule=2,
                                             granule_publish_rate=5 )
            instrument_agent_obj = any_old(RT.InstrumentAgent, {"stream_configurations":[raw_config]})
            instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj)

            # device creation
            instrument_device_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_instrument_agent_instance_to_instrument_device_with_has_agent_instance(instrument_agent_instance_id, instrument_device_id)
            self.RR2.assign_instrument_agent_to_instrument_agent_instance_with_has_agent_definition(instrument_agent_id, instrument_agent_instance_id)
            self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id)

            return instrument_agent_instance_id, instrument_agent_id, instrument_device_id



        # can't do anything without an agent instance obj
        log.debug("Testing that preparing a launcher without agent instance raises an error")
        pconfig_builder._update_cached_predicates() # associations have changed since builder was instantiated
        self.assertRaises(AssertionError, pconfig_builder.prepare, will_launch=False)

        log.debug("Making the structure for a platform agent, which will be the child")
        platform_agent_instance_child_id, _, platform_device_child_id  = _make_platform_agent_structure()
        platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id)

        log.debug("Preparing a valid agent instance launch, for config only")
        pconfig_builder._update_cached_predicates() # associations have changed since builder was instantiated
        pconfig_builder.set_agent_instance_object(platform_agent_instance_child_obj)
        child_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(child_config, platform_device_child_id)


        log.debug("Making the structure for a platform agent, which will be the parent")
        platform_agent_instance_parent_id, _, platform_device_parent_id  = _make_platform_agent_structure()
        platform_agent_instance_parent_obj = self.RR2.read(platform_agent_instance_parent_id)

        log.debug("Testing child-less parent as a child config")
        pconfig_builder._update_cached_predicates() # associations have changed since builder was instantiated
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(parent_config, platform_device_parent_id)

        log.debug("assigning child platform to parent")
        self.RR2.assign_platform_device_to_platform_device_with_has_device(platform_device_child_id,
                                                                           platform_device_parent_id)
        child_device_ids = self.RR2.find_platform_device_ids_of_device_using_has_device(platform_device_parent_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing parent + child as parent config")
        pconfig_builder._update_cached_predicates() # associations have changed since builder was instantiated
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(parent_config, platform_device_parent_id, platform_device_child_id)


        log.debug("making the structure for an instrument agent")
        instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure()
        instrument_agent_instance_obj = self.RR2.read(instrument_agent_instance_id)

        log.debug("Testing instrument config")
        iconfig_builder._update_cached_predicates() # associations have changed since builder was instantiated
        iconfig_builder.set_agent_instance_object(instrument_agent_instance_obj)
        instrument_config = iconfig_builder.prepare(will_launch=False)
        verify_instrument_config(instrument_config, instrument_device_id)

        log.debug("assigning instrument to platform")
        self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id,
                                                                             platform_device_child_id)
        child_device_ids = self.RR2.find_instrument_device_ids_of_device_using_has_device(platform_device_child_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing entire config")
        pconfig_builder._update_cached_predicates() # associations have changed since builder was instantiated
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        full_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(full_config, platform_device_parent_id, platform_device_child_id, instrument_device_id)

        #self.fail(parent_config)
        #plauncher.prepare(will_launch=False)


    @attr('PREP')
    def test_prepare_resource_support(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """

        #stuff we control
        instrument_agent_instance_id, _ =  self.RR.create(any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ =           self.RR.create(any_old(RT.InstrumentAgent))
        instrument_model_id, _ =           self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ =          self.RR.create(any_old(RT.InstrumentDevice))
        platform_agent_instance_id, _ =    self.RR.create(any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ =             self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ =            self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ =             self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ =              self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ =               self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        data_producer_id, _      = self.RR.create(any_old(RT.DataProducer))
        org_id, _ =                self.RR.create(any_old(RT.Org))

        #instrument_agent_instance_id #is only a target

        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id)
        self.RR.create_association(org_id, PRED.hasResource, instrument_device_id)


        instrument_model_id #is only a target

        platform_agent_instance_id #is only a target

        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        platform_model_id #is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id)

        sensor_model_id #is only a target

        #create a parsed product for this instrument output
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            processing_level_code='Parsed_Canonical',
            temporal_domain = tdom,
            spatial_domain = sdom)
        pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1)


        def addInstOwner(inst_id, subject):

            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            user_info_id = self.IDS.create_user_info(user_id, user_info_obj)

            self.RR.create_association(inst_id, PRED.hasOwner, user_id)


        #Testing multiple instrument owners
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

        def ion_object_encoder(obj):
            return obj.__dict__
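        # ion_object_encoder appears only to be used by the commented-out
        # simplejson dump further below; it lets json serialize an IonObject
        # by exposing its attribute dict.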


        #First call to create
        instrument_data = self.IMS.prepare_instrument_device_support()

        self.assertEqual(instrument_data._id, '')
        self.assertEqual(instrument_data.type_, OT.InstrumentDevicePrepareSupport)
        self.assertEqual(len(instrument_data.instrument_models), 1)
        self.assertEqual(instrument_data.instrument_models[0]._id, instrument_model_id)
        self.assertEqual(len(instrument_data.instrument_agents), 1)
        self.assertEqual(instrument_data.instrument_agents[0]._id, instrument_agent_id)
        self.assertEqual(len(instrument_data.instrument_device_model), 0)
        self.assertEqual(len(instrument_data.instrument_agent_models), 1)
        self.assertEqual(instrument_data.instrument_agent_models[0].o, instrument_model_id)
        self.assertEqual(instrument_data.instrument_agent_models[0].s, instrument_agent_id)
        self.assertEqual(len(instrument_data.sensor_devices), 1)
        self.assertEqual(instrument_data.sensor_devices[0]._id, sensor_device_id)


        #Next call to update
        instrument_data = self.IMS.prepare_instrument_device_support(instrument_device_id)

        self.assertEqual(instrument_data._id, instrument_device_id)
        self.assertEqual(instrument_data.type_, OT.InstrumentDevicePrepareSupport)
        self.assertEqual(len(instrument_data.instrument_models), 1)
        self.assertEqual(instrument_data.instrument_models[0]._id, instrument_model_id)
        self.assertEqual(len(instrument_data.instrument_agents), 1)
        self.assertEqual(instrument_data.instrument_agents[0]._id, instrument_agent_id)
        self.assertEqual(len(instrument_data.instrument_device_model), 1)
        self.assertEqual(instrument_data.instrument_device_model[0].s, instrument_device_id)
        self.assertEqual(instrument_data.instrument_device_model[0].o, instrument_model_id)
        self.assertEqual(len(instrument_data.instrument_agent_models), 1)
        self.assertEqual(instrument_data.instrument_agent_models[0].o, instrument_model_id)
        self.assertEqual(instrument_data.instrument_agent_models[0].s, instrument_agent_id)
        self.assertEqual(len(instrument_data.sensor_devices), 1)
        self.assertEqual(instrument_data.sensor_devices[0]._id, sensor_device_id)
        self.assertEqual(instrument_data.assign_instrument_model_request.request_parameters['instrument_device_id'], instrument_device_id)


        platform_data = self.IMS.prepare_platform_device_support()

        self.assertEqual(platform_data._id, '')
        self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport)
        self.assertEqual(len(platform_data.platform_models), 1)
        self.assertEqual(platform_data.platform_models[0]._id, platform_model_id)
        self.assertEqual(len(platform_data.platform_agents), 1)
        self.assertEqual(platform_data.platform_agents[0]._id, platform_agent_id)
        self.assertEqual(len(platform_data.platform_device_model), 0)
        self.assertEqual(len(platform_data.platform_agent_models), 1)
        self.assertEqual(platform_data.platform_agent_models[0].o, platform_model_id)
        self.assertEqual(platform_data.platform_agent_models[0].s, platform_agent_id)
        self.assertEqual(len(platform_data.instrument_devices), 1)
        self.assertEqual(platform_data.instrument_devices[0]._id, instrument_device_id)


        platform_data = self.IMS.prepare_platform_device_support(platform_device_id)

        #print simplejson.dumps(platform_data, default=ion_object_encoder, indent= 2)


        self.assertEqual(platform_data._id, platform_device_id)
        self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport)
        self.assertEqual(len(platform_data.platform_models), 1)
        self.assertEqual(platform_data.platform_models[0]._id, platform_model_id)
        self.assertEqual(len(platform_data.platform_agents), 1)
        self.assertEqual(platform_data.platform_agents[0]._id, platform_agent_id)
        self.assertEqual(len(platform_data.platform_device_model), 1)
        self.assertEqual(platform_data.platform_device_model[0].s, platform_device_id)
        self.assertEqual(platform_data.platform_device_model[0].o, platform_model_id)
        self.assertEqual(len(platform_data.platform_agent_models), 1)
        self.assertEqual(platform_data.platform_agent_models[0].o, platform_model_id)
        self.assertEqual(platform_data.platform_agent_models[0].s, platform_agent_id)
        self.assertEqual(len(platform_data.instrument_devices), 1)
        self.assertEqual(platform_data.instrument_devices[0]._id, instrument_device_id)
        self.assertEqual(platform_data.assign_platform_model_request.request_parameters['platform_device_id'], platform_device_id)

        # cleanup
        c = DotDict()
        c.resource_registry = self.RR
        self.RR2.pluck(instrument_agent_id)
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(instrument_device_id)
        self.RR2.pluck(platform_agent_id)
        self.RR2.pluck(sensor_device_id)
        self.IMS.force_delete_instrument_agent(instrument_agent_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id)
        self.IMS.force_delete_platform_agent(platform_agent_id)
        self.IMS.force_delete_platform_device(platform_device_id)
        self.IMS.force_delete_platform_model(platform_model_id)
        self.IMS.force_delete_sensor_device(sensor_device_id)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #stuff we associate to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)


class TestPlatformInstrument(BaseIntTestPlatform):
    def setUp(self):
        self._start_container()

        self._pp = pprint.PrettyPrinter()

        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        #url = OmsTestMixin.start_http_server()
        #log.debug("TestPlatformInstrument:setup http url %s", url)
        #
        #result = self.oms.event.register_event_listener(url)
        #log.debug("TestPlatformInstrument:setup register_event_listener result %s", result)

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.org_id = self.RR2.create(any_old(RT.Org))
        log.debug("Org created: %s", self.org_id)

        # see _set_receive_timeout
        self._receive_timeout = 300

        self.instrument_device_id = ''
        self.platform_device_id = ''
        self.platform_site_id = ''
        self.platform_agent_instance_id = ''
        self._pa_client = ''

        def done():
            CIOMSClientFactory.destroy_instance(self.oms)
            event_notifications = OmsTestMixin.stop_http_server()
            log.info("event_notifications = %s" % str(event_notifications))

        self.addCleanup(done)

    @unittest.skip('Must be run locally...')
    def test_platform_with_instrument_streaming(self):
        #
        # The following is with just a single platform and the single
        # instrument "SBE37_SIM_08", which corresponds to the one on port 4008.
        #

        #load the parameters and the param dicts necessary for the VEL3D
        log.debug(
            "load params------------------------------------------------------------------------------"
        )
        self._load_params()

        log.debug(
            " _register_oms_listener------------------------------------------------------------------------------"
        )
        self._register_oms_listener()

        #create the instrument device/agent/mode
        log.debug("---------- create_instrument_resources ----------")
        self._create_instrument_resources()

        #create the platform device, agent and instance
        log.debug("---------- create_platform_configuration ----------")
        self._create_platform_configuration('LPJBox_CI_Ben_Hall')

        self.rrclient.create_association(subject=self.platform_device_id,
                                         predicate=PRED.hasDevice,
                                         object=self.instrument_device_id)

        log.debug("---------- start_platform ----------")
        self._start_platform()
        self.addCleanup(self._stop_platform)

        # get everything in command mode:
        self._ping_agent()
        log.debug(" ---------- initialize ----------")
        self._initialize()

        _ia_client = ResourceAgentClient(self.instrument_device_id,
                                         process=FakeProcess())
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)

        log.debug(" ---------- go_active ----------")
        self._go_active()
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)

        log.debug("---------- run ----------")
        self._run()

        gevent.sleep(2)

        log.debug(" ---------- _start_resource_monitoring ----------")
        self._start_resource_monitoring()
        gevent.sleep(2)
        #
        #        # verify the instrument is command state:
        #        state = ia_client.get_agent_state()
        #        log.debug(" TestPlatformInstrument get_agent_state: %s", state)
        #        self.assertEqual(state, ResourceAgentState.COMMAND)

        log.debug(" ---------- _stop_resource_monitoring ----------")
        self._stop_resource_monitoring()
        gevent.sleep(2)

        log.debug(" ---------- go_inactive ----------")
        self._go_inactive()
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)

        self._reset()
        self._shutdown()

    def _get_platform_attributes(self):
        log.debug(" ----------get_platform_attributes ----------")
        attr_infos = self.oms.attr.get_platform_attributes(
            'LPJBox_CI_Ben_Hall')
        log.debug('_get_platform_attributes: %s', self._pp.pformat(attr_infos))

        attrs = attr_infos['LPJBox_CI_Ben_Hall']
        for attrid, arrinfo in attrs.iteritems():
            arrinfo['attr_id'] = attrid

        log.debug('_get_platform_attributes: %s', self._pp.pformat(attrs))
        return attrs

    def _load_params(self):

        log.info(" ---------- load_params ----------")
        # load_parameter_scenarios
        self.container.spawn_process(
            "Loader",
            "ion.processes.bootstrap.ion_loader",
            "IONLoader",
            config=dict(
                op="load",
                scenario="BETA",
                path="master",
                categories=
                "ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
                clearcols="owner_id,org_ids",
                assets="res/preload/r2_ioc/ooi_assets",
                parseooi="True",
            ))

    def _create_platform_configuration(self,
                                       platform_id,
                                       parent_platform_id=None):
        """
        This method is an adaptation of test_agent_instance_config in
        test_instrument_management_service_integration.py

        @param platform_id
        @param parent_platform_id
        @return the platform site id and the platform deployment id
        """

        param_dict_name = 'platform_eng_parsed'
        parsed_rpdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            param_dict_name, id_only=True)
        self.parsed_stream_def_id = self.pubsubclient.create_stream_definition(
            name='parsed', parameter_dictionary_id=parsed_rpdict_id)

        driver_config = PLTFRM_DVR_CONFIG
        driver_config['attributes'] = self._get_platform_attributes()  #self._platform_attributes[platform_id]

        #OMS returning an error for port.get_platform_ports
        #driver_config['ports']      = self._platform_ports[platform_id]
        log.debug("driver_config: %s", driver_config)

        # instance creation
        platform_agent_instance_obj = any_old(RT.PlatformAgentInstance,
                                              {'driver_config': driver_config})

        platform_agent_instance_obj.agent_config = {
            'platform_config': {
                'platform_id': 'LPJBox_CI_Ben_Hall',
                'parent_platform_id': None
            }
        }

        self.platform_agent_instance_id = self.imsclient.create_platform_agent_instance(
            platform_agent_instance_obj)

        # agent creation
        platform_agent_obj = any_old(
            RT.PlatformAgent, {
                "stream_configurations": self._get_platform_stream_configs(),
                'driver_module': PLTFRM_DVR_MOD,
                'driver_class': PLTFRM_DVR_CLS
            })
        platform_agent_id = self.imsclient.create_platform_agent(
            platform_agent_obj)

        # device creation
        self.platform_device_id = self.imsclient.create_platform_device(
            any_old(RT.PlatformDevice))

        # data product creation
        dp_obj = any_old(RT.DataProduct)
        dp_id = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=self.parsed_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.platform_device_id, data_product_id=dp_id)
        self.dpclient.activate_data_product_persistence(data_product_id=dp_id)
        self.addCleanup(self.dpclient.delete_data_product, dp_id)

        # assignments
        self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(
            self.platform_agent_instance_id, self.platform_device_id)
        self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(
            platform_agent_id, self.platform_agent_instance_id)
        self.RR2.assign_platform_device_to_org_with_has_resource(
            self.platform_agent_instance_id, self.org_id)

        #######################################
        # dataset

        log.debug('data product = %s', dp_id)

        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None,
                                                   True)
        log.debug('Data product stream_ids = %s', stream_ids)
        stream_id = stream_ids[0]

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        #######################################

        log.debug(
            '_create_platform_site_and_deployment  platform_device_id: %s',
            self.platform_device_id)

        site_object = IonObject(RT.PlatformSite, name='PlatformSite1')
        self.platform_site_id = self.omsclient.create_platform_site(
            platform_site=site_object, parent_id='')
        log.debug('_create_platform_site_and_deployment  site id: %s',
                  self.platform_site_id)

        #create supporting objects for the Deployment resource
        # 1. temporal constraint
        # find current deployment using time constraints
        current_time = int(calendar.timegm(time.gmtime()))
        # two years on either side of current time
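        # (63115200 s = 2 * 365.25 days * 86400 s/day, i.e. two years)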
        start = current_time - 63115200
        end = current_time + 63115200
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=str(start),
                                    end_datetime=str(end))
        # 2. PlatformPort object which defines device to port map
        platform_port_obj = IonObject(
            OT.PlatformPort,
            reference_designator='GA01SUMO-FI003-01-CTDMO0999',
            port_type=PortTypeEnum.UPLINK,
            ip_address='0')

        # now create the Deployment
        deployment_obj = IonObject(
            RT.Deployment,
            name='TestPlatformDeployment',
            description='some new deployment',
            context=IonObject(OT.CabledNodeDeploymentContext),
            constraint_list=[temporal_bounds],
            port_assignments={self.platform_device_id: platform_port_obj})

        platform_deployment_id = self.omsclient.create_deployment(
            deployment=deployment_obj,
            site_id=self.platform_site_id,
            device_id=self.platform_device_id)
        log.debug('_create_platform_site_and_deployment  deployment_id: %s',
                  platform_deployment_id)

        deploy_obj2 = self.omsclient.read_deployment(platform_deployment_id)
        log.debug('_create_platform_site_and_deployment  deploy_obj2 : %s',
                  deploy_obj2)
        return self.platform_site_id, platform_deployment_id

    def _create_instrument_resources(self):
        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='VEL3D',
                                  description="VEL3D")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        raw_config = StreamConfiguration(stream_name='raw',
                                         parameter_dictionary_name='raw')
        vel3d_b_sample = StreamConfiguration(
            stream_name='vel3d_b_sample',
            parameter_dictionary_name='vel3d_b_sample')
        vel3d_b_engineering = StreamConfiguration(
            stream_name='vel3d_b_engineering',
            parameter_dictionary_name='vel3d_b_engineering')

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri=
            "http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg",
            stream_configurations=[
                raw_config, vel3d_b_sample, vel3d_b_engineering
            ])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='VEL3DDevice',
                                   description="VEL3DDevice",
                                   serial_number="12345")
        self.instrument_device_id = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, self.instrument_device_id)

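        # Port agent connection settings: device_addr/device_port presumably
        # point at the instrument (simulator) endpoint, while command_port and
        # data_port are the local ports the port agent itself will expose (see
        # _start_port_agent below, which reads the actual ports back from the
        # launched process).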
        port_agent_config = {
            'device_addr': '10.180.80.6',
            'device_port': 2101,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': 1025,
            'data_port': 1026,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='VEL3DAgentInstance',
                                          description="VEL3DAgentInstance",
                                          port_agent_config=port_agent_config,
                                          alerts=[])

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, self.instrument_device_id)
        self._start_port_agent(
            self.imsclient.read_instrument_agent_instance(
                instAgentInstance_id))

        vel3d_b_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_sample', id_only=True)
        vel3d_b_sample_stream_def_id = self.pubsubclient.create_stream_definition(
            name='vel3d_b_sample',
            parameter_dictionary_id=vel3d_b_sample_pdict_id)

        vel3d_b_engineering_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_engineering', id_only=True)
        vel3d_b_engineering_stream_def_id = self.pubsubclient.create_stream_definition(
            name='vel3d_b_engineering',
            parameter_dictionary_id=vel3d_b_engineering_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'raw', id_only=True)
        raw_stream_def_id = self.pubsubclient.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_sample',
                           description='vel3d_b_sample')

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=vel3d_b_sample_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device_id,
            data_product_id=data_product_id1)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_engineering',
                           description='vel3d_b_engineering')

        data_product_id2 = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=vel3d_b_engineering_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device_id,
            data_product_id=data_product_id2)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id2)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test')

        data_product_id3 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device_id,
            data_product_id=data_product_id3)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id3)

        #create instrument site and associated deployment
        site_object = IonObject(RT.InstrumentSite, name='InstrumentSite1')
        instrument_site_id = self.omsclient.create_instrument_site(
            instrument_site=site_object, parent_id=self.platform_site_id)
        log.debug('_create_instrument_site_and_deployment  site id: %s',
                  instrument_site_id)

        #create supporting objects for the Deployment resource
        # 1. temporal constraint
        # find current deployment using time constraints
        current_time = int(calendar.timegm(time.gmtime()))
        # two years on either side of current time
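        # (63115200 s = 2 * 365.25 days * 86400 s/day, i.e. two years)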
        start = current_time - 63115200
        end = current_time + 63115200
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=str(start),
                                    end_datetime=str(end))
        # 2. PlatformPort object which defines device to port map
        platform_port_obj = IonObject(
            OT.PlatformPort,
            reference_designator='GA01SUMO-FI003-03-CTDMO0999',
            port_type=PortTypeEnum.PAYLOAD,
            ip_address='0')

        # now create the Deployment
        deployment_obj = IonObject(
            RT.Deployment,
            name='TestInstrumentDeployment',
            description='some new deployment',
            context=IonObject(OT.CabledInstrumentDeploymentContext),
            constraint_list=[temporal_bounds],
            port_assignments={self.instrument_device_id: platform_port_obj})

        instrument_deployment_id = self.omsclient.create_deployment(
            deployment=deployment_obj,
            site_id=instrument_site_id,
            device_id=self.instrument_device_id)
        log.debug('_create_instrument_site_and_deployment  deployment_id: %s',
                  instrument_deployment_id)

    def _start_port_agent(self, instrument_agent_instance_obj=None):
        """
        Construct and start the port agent, ONLY NEEDED FOR INSTRUMENT AGENTS.
        """

        _port_agent_config = instrument_agent_instance_obj.port_agent_config

        # This blocks until the port agent starts up or a timeout occurs
        _pagent = PortAgentProcess.launch_process(_port_agent_config,
                                                  test_mode=True)
        pid = _pagent.get_pid()
        port = _pagent.get_data_port()
        cmd_port = _pagent.get_command_port()
        log.info(
            "IMS:_start_pagent returned from PortAgentProcess.launch_process pid: %s ",
            pid)

        # Hack to get ready for DEMO.  Further thought needs to be put into
        # how we pass this config info around.
        host = 'localhost'

        driver_config = instrument_agent_instance_obj.driver_config
        comms_config = driver_config.get('comms_config')
        if comms_config:
            host = comms_config.get('addr')
        else:
            log.warn("No comms_config specified, using '%s'" % host)

        # Configure driver to use port agent port number.
        instrument_agent_instance_obj.driver_config['comms_config'] = {
            'addr': host,
            'cmd_port': cmd_port,
            'port': port
        }
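        # From here on the driver talks to the port agent's data/command ports
        # (obtained from the launched PortAgentProcess above) rather than to
        # the instrument directly.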
        instrument_agent_instance_obj.driver_config['pagent_pid'] = pid
        self.imsclient.update_instrument_agent_instance(
            instrument_agent_instance_obj)
        return self.imsclient.read_instrument_agent_instance(
            instrument_agent_instance_obj._id)

    def _start_platform(self):
        """
        Starts the platform, waiting for it to transition to the
        UNINITIALIZED state (note that the agent starts in the LAUNCHING state).

        More concretely, the sequence of steps is:
        - prepares subscriber to receive the UNINITIALIZED state transition
        - launches the platform process
        - waits for the start of the process
        - waits for the transition to the UNINITIALIZED state
        """
        ##############################################################
        # prepare to receive the UNINITIALIZED state transition:
        async_res = AsyncResult()

        def consume_event(evt, *args, **kwargs):
            log.debug("Got ResourceAgentStateEvent %s from origin %r",
                      evt.state, evt.origin)
            if evt.state == PlatformAgentState.UNINITIALIZED:
                async_res.set(evt)

        # start subscriber:
        sub = EventSubscriber(event_type="ResourceAgentStateEvent",
                              origin=self.platform_device_id,
                              callback=consume_event)
        sub.start()
        log.info(
            "registered event subscriber to wait for state=%r from origin %r",
            PlatformAgentState.UNINITIALIZED, self.platform_device_id)
        #self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)

        ##############################################################
        # now start the platform:
        agent_instance_id = self.platform_agent_instance_id
        log.debug("about to call start_platform_agent_instance with id=%s",
                  agent_instance_id)
        pid = self.imsclient.start_platform_agent_instance(
            platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        #wait for start
        agent_instance_obj = self.imsclient.read_platform_agent_instance(
            agent_instance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     self.platform_device_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(90),
            "The platform agent instance did not spawn in 90 seconds")

        # Start a resource agent client to talk with the agent.
        self._pa_client = ResourceAgentClient(self.platform_device_id,
                                              name=gate.process_id,
                                              process=FakeProcess())
        log.debug("got platform agent client %s", str(self._pa_client))

        ##############################################################
        # wait for the UNINITIALIZED event:
        async_res.get(timeout=self._receive_timeout)

    def _register_oms_listener(self):

        # connect to OMS and register an event listener
        log.debug("---------- connect_to_oms ---------- ")
        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        #buddha url
        url = "http://10.22.88.168:5000/ion-service/oms_event"
        log.info("test_oms_events_receive:setup http url %s", url)

        result = self.oms.event.register_event_listener(url)
        log.debug("_register_oms_listener register_event_listener result %s",
                  result)

        #-------------------------------------------------------------------------------------
        # Set up the subscriber to catch the alert event
        #-------------------------------------------------------------------------------------

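        # Note: self.catch_alert is presumably a queue created by the test
        # that exercises this listener; it is not initialized here.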
        def callback_for_alert(event, *args, **kwargs):
            log.debug("caught an OMSDeviceStatusEvent: %s", event)
            self.catch_alert.put(event)

        self.event_subscriber = EventSubscriber(
            event_type='OMSDeviceStatusEvent', callback=callback_for_alert)

        self.event_subscriber.start()
        self.addCleanup(self.event_subscriber.stop)

        result = self.oms.event.generate_test_event({
            'platform_id': 'fake_platform_id',
            'message':
            "fake event triggered from CI using OMS' generate_test_event",
            'severity': '3',
            'group ': 'power'
        })
        log.debug("_register_oms_listener generate_test_event result %s",
                  result)

    def _stop_platform(self):
        try:
            self.imsclient.stop_platform_agent_instance(
                self.platform_agent_instance_id)
        except Exception:
            log.warn(
                "platform_id=%r: Exception in IMS.stop_platform_agent_instance with "
                "platform_agent_instance_id = %r. Perhaps already dead.",
                self.platform_device_id, self.platform_agent_instance_id)


class TestPlatformInstrument(BaseIntTestPlatform):
    def setUp(self):
        self._start_container()

        self._pp = pprint.PrettyPrinter()

        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        self._get_platform_attributes()

        url = OmsTestMixin.start_http_server()
        log.info("TestPlatformInstrument:setup http url %s", url)

        result = self.oms.event.register_event_listener(url)
        log.info(
            "TestPlatformInstrument:setup register_event_listener result %s",
            result)

        #        response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall')
        #        log.info("TestPlatformInstrument:setup get_platform_ports %s", response)

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.org_id = self.RR2.create(any_old(RT.Org))
        log.debug("Org created: %s", self.org_id)

        # see _set_receive_timeout
        self._receive_timeout = 177

        self.instrument_device = ''
        self.platform_device = ''
        self.platform_agent_instance_id = ''
        self._pa_client = ''

        def done():
            CIOMSClientFactory.destroy_instance(self.oms)
            event_notifications = OmsTestMixin.stop_http_server()
            log.info("event_notifications = %s" % str(event_notifications))

        self.addCleanup(done)

    def _get_platform_attributes(self):
        attr_infos = self.oms.attr.get_platform_attributes(
            'LPJBox_CI_Ben_Hall')
        log.debug('_get_platform_attributes: %s', self._pp.pformat(attr_infos))

        #        ret_infos = attr_infos['LPJBox_CI_Ben_Hall']
        #        for attrName, attr_defn in ret_infos.iteritems():
        #            attr = AttrNode(attrName, attr_defn)
        #            pnode.add_attribute(attr)
        return attr_infos

    @unittest.skip('Still in construction...')
    def test_platform_with_instrument_streaming(self):
        #
        # The following is with just a single platform and the single
        # instrument "SBE37_SIM_08", which corresponds to the one on port 4008.
        #

        #load the parameters and the param dicts necessary for the VEL3D
        self._load_params()

        #create the instrument device/agent/mode
        self._create_instrument_resources()

        #create the platform device, agent and instance
        self._create_platform_configuration('LPJBox_CI_Ben_Hall')

        self.rrclient.create_association(subject=self.platform_device,
                                         predicate=PRED.hasDevice,
                                         object=self.instrument_device)

        self._start_platform()
        #        self.addCleanup(self._stop_platform, p_root)

        # get everything in command mode:
        self._ping_agent()
        self._initialize()

        _ia_client = ResourceAgentClient(self.instrument_device,
                                         process=FakeProcess())
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)

        self._go_active()
        #        self._run()

        gevent.sleep(3)

        # note that this includes the instrument also getting to the command state

        #        self._stream_instruments()

        # get client to the instrument:
        # the i_obj is a DotDict with various pieces captured during the
        # set-up of the instrument, in particular instrument_device_id
        #i_obj = self._get_instrument(instr_key)

        #        log.debug("KK creating ResourceAgentClient")
        #        ia_client = ResourceAgentClient(i_obj.instrument_device_id,
        #                                        process=FakeProcess())
        #        log.debug("KK got ResourceAgentClient: %s", ia_client)
        #
        #        # verify the instrument is command state:
        #        state = ia_client.get_agent_state()
        #        log.debug("KK instrument state: %s", state)
        #        self.assertEqual(state, ResourceAgentState.COMMAND)

        self._reset()
        self._shutdown()

    def _load_params(self):

        log.info(
            "--------------------------------------------------------------------------------------------------------"
        )
        # load_parameter_scenarios
        self.container.spawn_process(
            "Loader",
            "ion.processes.bootstrap.ion_loader",
            "IONLoader",
            config=dict(
                op="load",
                scenario="BETA",
                path="master",
                categories=
                "ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
                clearcols="owner_id,org_ids",
                assets="res/preload/r2_ioc/ooi_assets",
                parseooi="True",
            ))

    def _create_platform_configuration(self,
                                       platform_id,
                                       parent_platform_id=None):
        """
        This method is an adaptation of test_agent_instance_config in
        test_instrument_management_service_integration.py

        @param platform_id
        @param parent_platform_id
        @return None; the created resources are stored on self.
        """

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        param_dict_name = 'platform_eng_parsed'
        parsed_rpdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            param_dict_name, id_only=True)
        self.parsed_stream_def_id = self.pubsubclient.create_stream_definition(
            name='parsed', parameter_dictionary_id=parsed_rpdict_id)

        driver_config = PLTFRM_DVR_CONFIG
        driver_config['attributes'] = self._get_platform_attributes()  #self._platform_attributes[platform_id]
        #OMS returning an error for port.get_platform_ports
        #driver_config['ports']      = self._platform_ports[platform_id]
        log.debug("driver_config: %s", driver_config)

        # instance creation
        platform_agent_instance_obj = any_old(RT.PlatformAgentInstance,
                                              {'driver_config': driver_config})

        platform_agent_instance_obj.agent_config = {
            'platform_config': {
                'platform_id': 'LPJBox_CI_Ben_Hall',
                'parent_platform_id': None
            }
        }

        self.platform_agent_instance_id = self.imsclient.create_platform_agent_instance(
            platform_agent_instance_obj)

        # agent creation
        platform_agent_obj = any_old(
            RT.PlatformAgent, {
                "stream_configurations": self._get_platform_stream_configs(),
                'driver_module': PLTFRM_DVR_MOD,
                'driver_class': PLTFRM_DVR_CLS
            })
        platform_agent_id = self.imsclient.create_platform_agent(
            platform_agent_obj)

        # device creation
        self.platform_device = self.imsclient.create_platform_device(
            any_old(RT.PlatformDevice))

        # data product creation
        dp_obj = any_old(RT.DataProduct, {
            "temporal_domain": tdom,
            "spatial_domain": sdom
        })
        dp_id = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=self.parsed_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.platform_device, data_product_id=dp_id)
        self.dpclient.activate_data_product_persistence(data_product_id=dp_id)
        self.addCleanup(self.dpclient.delete_data_product, dp_id)

        # assignments
        self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(
            self.platform_agent_instance_id, self.platform_device)
        self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(
            platform_agent_id, self.platform_agent_instance_id)
        self.RR2.assign_platform_device_to_org_with_has_resource(
            self.platform_agent_instance_id, self.org_id)

        #######################################
        # dataset

        log.debug('data product = %s', dp_id)

        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None,
                                                   True)
        log.debug('Data product stream_ids = %s', stream_ids)
        stream_id = stream_ids[0]

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        #######################################

        return

    def _create_instrument_resources(self):
        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='VEL3D',
                                  description="VEL3D")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        raw_config = StreamConfiguration(stream_name='raw',
                                         parameter_dictionary_name='raw')
        vel3d_b_sample = StreamConfiguration(
            stream_name='vel3d_b_sample',
            parameter_dictionary_name='vel3d_b_sample')
        vel3d_b_engineering = StreamConfiguration(
            stream_name='vel3d_b_engineering',
            parameter_dictionary_name='vel3d_b_engineering')

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri=
            "http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg",
            stream_configurations=[
                raw_config, vel3d_b_sample, vel3d_b_engineering
            ])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='VEL3DDevice',
                                   description="VEL3DDevice",
                                   serial_number="12345")
        self.instrument_device = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, self.instrument_device)

        port_agent_config = {
            'device_addr': '10.180.80.6',
            'device_port': 2101,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': 1025,
            'data_port': 1026,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='VEL3DAgentInstance',
                                          description="VEL3DAgentInstance",
                                          port_agent_config=port_agent_config,
                                          alerts=[])

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, self.instrument_device)
        self._start_port_agent(
            self.imsclient.read_instrument_agent_instance(
                instAgentInstance_id))

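        # Look up the pre-loaded parameter dictionaries and create a stream
        # definition for each of the agent's streams.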
        vel3d_b_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_sample', id_only=True)
        vel3d_b_sample_stream_def_id = self.pubsubclient.create_stream_definition(
            name='vel3d_b_sample',
            parameter_dictionary_id=vel3d_b_sample_pdict_id)

        vel3d_b_engineering_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_engineering', id_only=True)
        vel3d_b_engineering_stream_def_id = self.pubsubclient.create_stream_definition(
            name='vel3d_b_engineering',
            parameter_dictionary_id=vel3d_b_engineering_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'raw', id_only=True)
        raw_stream_def_id = self.pubsubclient.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create the sample, engineering, and raw Data Products for the device
        #-------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_sample',
                           description='vel3d_b_sample',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=vel3d_b_sample_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device,
            data_product_id=data_product_id1)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_engineering',
                           description='vel3d_b_engineering',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id2 = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=vel3d_b_engineering_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device,
            data_product_id=data_product_id2)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id2)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id3 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device,
            data_product_id=data_product_id3)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id3)

    def _start_port_agent(self, instrument_agent_instance_obj=None):
        """
        Construct and start the port agent, ONLY NEEDED FOR INSTRUMENT AGENTS.
        """

        _port_agent_config = instrument_agent_instance_obj.port_agent_config

        # This call blocks until the port agent starts up or a timeout occurs.
        _pagent = PortAgentProcess.launch_process(_port_agent_config,
                                                  test_mode=True)
        pid = _pagent.get_pid()
        port = _pagent.get_data_port()
        cmd_port = _pagent.get_command_port()
        log.info(
            "_start_port_agent: PortAgentProcess.launch_process returned pid %s",
            pid)

        # Hack to get ready for DEMO.  Further thought needs to be put into
        # how we pass this config info around.
        host = 'localhost'

        driver_config = instrument_agent_instance_obj.driver_config
        comms_config = driver_config.get('comms_config')
        if comms_config:
            host = comms_config.get('addr')
        else:
            log.warn("No comms_config specified, using '%s'" % host)

        # Configure driver to use port agent port number.
        instrument_agent_instance_obj.driver_config['comms_config'] = {
            'addr': host,
            'cmd_port': cmd_port,
            'port': port
        }
        instrument_agent_instance_obj.driver_config['pagent_pid'] = pid
        self.imsclient.update_instrument_agent_instance(
            instrument_agent_instance_obj)
        return self.imsclient.read_instrument_agent_instance(
            instrument_agent_instance_obj._id)

    def _start_platform(self):
        """
        Starts the platform agent and waits for it to transition to the
        UNINITIALIZED state (note that the agent starts in the LAUNCHING state).

        Concretely, the sequence of steps is:
        - prepare a subscriber to receive the UNINITIALIZED state transition
        - launch the platform agent process
        - wait for the process to start
        - wait for the transition to the UNINITIALIZED state
        """
        ##############################################################
        # prepare to receive the UNINITIALIZED state transition:
        async_res = AsyncResult()

        def consume_event(evt, *args, **kwargs):
            log.debug("Got ResourceAgentStateEvent %s from origin %r",
                      evt.state, evt.origin)
            if evt.state == PlatformAgentState.UNINITIALIZED:
                async_res.set(evt)

        # start subscriber:
        sub = EventSubscriber(event_type="ResourceAgentStateEvent",
                              origin=self.platform_device,
                              callback=consume_event)
        sub.start()
        log.info(
            "registered event subscriber to wait for state=%r from origin %r",
            PlatformAgentState.UNINITIALIZED, self.platform_device)
        #self._event_subscribers.append(sub)
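        # wait until the subscriber is ready so the state transition event is not missed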
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)

        ##############################################################
        # now start the platform:
        agent_instance_id = self.platform_agent_instance_id
        log.debug("about to call start_platform_agent_instance with id=%s",
                  agent_instance_id)
        pid = self.imsclient.start_platform_agent_instance(
            platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        # wait for the agent process to start
        agent_instance_obj = self.imsclient.read_platform_agent_instance(
            agent_instance_id)
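        # block until the process dispatcher reports the platform agent process as RUNNING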
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     self.platform_device._id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(90),
            "The platform agent instance did not spawn in 90 seconds")

        # Start a resource agent client to talk with the agent.
        self._pa_client = ResourceAgentClient(self.platform_device,
                                              name=gate.process_id,
                                              process=FakeProcess())
        log.debug("got platform agent client %s", str(self._pa_client))

        ##############################################################
        # wait for the UNINITIALIZED event:
        async_res.get(timeout=self._receive_timeout)