Code Example #1

class TestDataProductProvenance(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.process_dispatcher   = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
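        # Note: unittest runs addCleanup callbacks in LIFO order after the test
        # finishes, so this cleanup fires even when an assertion fails mid-test.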

    def test_get_data_product_provenance_report(self):
        #Create a test device
        device_obj = Device(name='Device1',
                            description='test device')
        device_id, _ = self.rrclient.create(device_obj)
        self.addCleanup(self.rrclient.delete, device_id)

        #Create a test DataProduct
        data_product1_obj = DataProduct(name='DataProduct1',
                                        description='test data product 1')
        data_product1_id, _ = self.rrclient.create(data_product1_obj)
        self.addCleanup(self.rrclient.delete, data_product1_id)

        #Create a test DataProcess
        data_process_obj = DataProcess(name='DataProcess',
                                       description='test data process')
        data_process_id, _ = self.rrclient.create(data_process_obj)
        self.addCleanup(self.rrclient.delete, data_process_id)

        #Create a second test DataProduct
        data_product2_obj = DataProduct(name='DataProduct2',
                                        description='test data product 2')
        data_product2_id, _ = self.rrclient.create(data_product2_obj)
        self.addCleanup(self.rrclient.delete, data_product2_id)

        #Create a test DataProducer
        data_producer_obj = DataProducer(name='DataProducer',
                                         description='test data producer')
        data_producer_id, rev = self.rrclient.create(data_producer_obj)

        #Link the DataProcess to the second DataProduct manually
        assoc_id, _ = self.rrclient.create_association(subject=data_process_id, predicate=PRED.hasInputProduct, object=data_product2_id)
        self.addCleanup(self.rrclient.delete_association, assoc_id)

        # Register the instrument and process. This links the device and the data process
        # with their own producers
        self.damsclient.register_instrument(device_id)
        self.addCleanup(self.damsclient.unregister_instrument, device_id)
        self.damsclient.register_process(data_process_id)
        self.addCleanup(self.damsclient.unregister_process, data_process_id)

        #Manually link the first DataProduct with the test DataProducer
        assoc_id, _ = self.rrclient.create_association(subject=data_product1_id, predicate=PRED.hasDataProducer, object=data_producer_id)

        #Get the DataProducer linked to the DataProcess (created in register_process above)
        #Associate that with DataProduct1's DataProducer
        data_process_producer_ids, _ = self.rrclient.find_objects(subject=data_process_id, predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True)
        assoc_id, _ = self.rrclient.create_association(subject=data_process_producer_ids[0], predicate=PRED.hasParent, object=data_producer_id)
        self.addCleanup(self.rrclient.delete_association, assoc_id)

        #Get the DataProducer linked to the Device (created in register_instrument above)
        #Make it the parent of DataProduct1's DataProducer
        device_producer_ids, _ = self.rrclient.find_objects(subject=device_id, predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True)
        assoc_id, _ = self.rrclient.create_association(subject=data_producer_id, predicate=PRED.hasParent, object=device_producer_ids[0])

        #Create the links between the Device, DataProducts, DataProcess, and all DataProducers
        self.damsclient.assign_data_product(input_resource_id=device_id, data_product_id=data_product1_id)
        self.addCleanup(self.damsclient.unassign_data_product, device_id, data_product1_id)
        self.damsclient.assign_data_product(input_resource_id=data_process_id, data_product_id=data_product2_id)
        self.addCleanup(self.damsclient.unassign_data_product, data_process_id, data_product2_id)
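
        # For orientation, the associations built above form this graph
        # (a sketch derived from the calls in this test):
        #
        #   Device1      --hasDataProducer--> device_producer    (register_instrument)
        #   DataProcess  --hasDataProducer--> process_producer   (register_process)
        #   DataProduct1 --hasDataProducer--> data_producer      (manual link)
        #   process_producer --hasParent--> data_producer --hasParent--> device_producer
        #   DataProcess  --hasInputProduct--> DataProduct2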

        #Traverse through the relationships to get the links between objects
        res = self.dpmsclient.get_data_product_provenance_report(data_product2_id)

        #Make sure there are four keys
        self.assertEqual(len(res.keys()), 4)

        parent_count = 0
        config_count = 0
        for v in res.itervalues():
            if 'parent' in v:
                parent_count += 1
            if 'config' in v:
                config_count += 1

        #Make sure there are three parents and four configs
        self.assertEqual(parent_count, 3)
        self.assertEqual(config_count, 4)
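
    # A minimal sketch (not part of the original tests) of walking the same
    # producer lineage by hand, using only registry calls already used in this
    # class. For DataProduct1 above it would return its own DataProducer
    # followed by the device's DataProducer.
    def _walk_producer_chain(self, resource_id):
        # Follow the resource's hasDataProducer link, then hasParent links
        # upward; returns DataProducer ids from nearest to oldest ancestor.
        producer_ids, _ = self.rrclient.find_objects(subject=resource_id,
            predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True)
        chain = producer_ids[:1]
        while chain:
            parent_ids, _ = self.rrclient.find_objects(subject=chain[-1],
                predicate=PRED.hasParent, object_type=RT.DataProducer, id_only=True)
            if not parent_ids:
                break
            chain.append(parent_ids[0])
        return chain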


    @unittest.skip('This test is obsolete with new framework')
    def test_get_provenance(self):

        #create a deployment with metadata and an initial site and device
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, "")
        log.debug( 'test_get_provenance: new instrument_site_id = %s ', str(instrument_site_id))


        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel" )

        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" %ex)
        log.debug( 'test_get_provenance: new InstrumentModel id = %s ', str(instModel_id))

        self.omsclient.assign_instrument_model_to_instrument_site(instModel_id, instrument_site_id)


        # Create InstrumentAgent
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict' )
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                name='agent007',
                                description="SBE37IMAgent",
                                driver_uri=DRV_URI_GOOD,
                                stream_configurations = [parsed_config] )
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" %ex)
        log.debug( 'test_get_provenance: new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_get_provenance: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" %ex)

        log.debug("test_get_provenance: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ", instDevice_id)


        #-------------------------------
        # Create CTD Parsed  data product
        #-------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubclient.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)

        log.debug( 'test_get_provenance: Creating new CDM data product with a stream definition')


        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dpmsclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', ctd_parsed_data_product)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)
        self.dpmsclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product)

        #-------------------------------
        # create a data product for the site to pass the OMS check (TODO: remove this check)
        #-------------------------------
        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        log_data_product_id = self.dpmsclient.create_data_product(dp_obj, parsed_stream_def_id)


        #-------------------------------
        # Deploy instrument device to instrument site
        #-------------------------------
        deployment_obj = IonObject(RT.Deployment,
                                        name='TestDeployment',
                                        description='some new deployment')
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id)
        self.imsclient.deploy_instrument_device(instDevice_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        self.omsclient.activate_deployment(deployment_id)
        inst_device_objs, _ = self.rrclient.find_objects(subject=instrument_site_id, predicate=PRED.hasDevice, object_type=RT.InstrumetDevice, id_only=False)
        log.debug("test_create_deployment: deployed device: %s ", str(inst_device_objs[0]) )

        #-------------------------------
        # Create the agent instance
        #-------------------------------

        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }
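
        # The SBE37 host/port values above are read from the container
        # configuration (CFG.device.sbe37.*) rather than hard-coded, so the
        # port agent can be pointed at a simulator or real device per deployment.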


        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
            description="SBE37IMAgentInstance",
            port_agent_config = port_agent_config)

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition ctd_L0_all")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new ctd_L0_all data process definition: %s" %ex)


        #-------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1ConductivityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_conductivity',
                            description='create the L1 conductivity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
                            class_name='CTDL1ConductivityTransform')
        try:
            ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1ConductivityTransform data process definition: %s" %ex)

        #-------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1PressureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_pressure',
                            description='create the L1 pressure data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
                            class_name='CTDL1PressureTransform')
        try:
            ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1PressureTransform data process definition: %s" %ex)


        #-------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1TemperatureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_temperature',
                            description='create the L1 temperature data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
                            class_name='CTDL1TemperatureTransform')
        try:
            ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1TemperatureTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition SalinityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_salinity',
                            description='create the L2 salinity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
                            class_name='SalinityTransform')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new SalinityTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition DensityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_density',
                            description='create the L2 density data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_density',
                            class_name='DensityTransform')
        try:
            ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new DensityTransform data process definition: %s" %ex)



        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )
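
        # Each 'binding' name ties an outgoing stream definition to one of the
        # transform's named outputs, which is how the single ctd_L0_all process
        # definition fans out into three separate L0 streams.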


        log.debug("TestDataProductProvenance: create output data product L0 conductivity")

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)


        log.debug("TestDataProductProvenance: create output data product L0 pressure")

        ctd_l0_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L0_Pressure',
                                                    description='transform output pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                            outgoing_stream_l0_pressure_id)
        log.debug("TestDataProductProvenance: create output data product L0 temperature")

        ctd_l0_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L0_Temperature',
                                                        description='transform output temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                outgoing_stream_l0_temperature_id)

        #-------------------------------
        # L1 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(name='L1_conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id, binding='conductivity' )

        outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(name='L1_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id, binding='pressure' )

        outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(name='L1_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id, binding='temperature' )

        log.debug("TestDataProductProvenance: create output data product L1 conductivity")

        ctd_l1_conductivity_output_dp_obj = IonObject(RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l1_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_conductivity_output_dp_obj,
                                                                                outgoing_stream_l1_conductivity_id)


        log.debug("TestDataProductProvenance: create output data product L1 pressure")

        ctd_l1_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L1_Pressure',
                                                    description='transform output L1 pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l1_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_pressure_output_dp_obj,
                                                                            outgoing_stream_l1_pressure_id)


        log.debug("TestDataProductProvenance: create output data product L1 temperature")

        ctd_l1_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L1_Temperature',
                                                        description='transform output L1 temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l1_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_temperature_output_dp_obj,
                                                                                outgoing_stream_l1_temperature_id)

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(name='L2_salinity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id, binding='salinity' )

        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(name='L2_Density', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id, binding='density' )

        log.debug("TestDataProductProvenance: create output data product L2 Salinity")

        ctd_l2_salinity_output_dp_obj = IonObject( RT.DataProduct,
                                                    name='L2_Salinity',
                                                    description='transform output L2 salinity',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)


        ctd_l2_salinity_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_salinity_output_dp_obj,
                                                                            outgoing_stream_l2_salinity_id)


        log.debug("TestDataProductProvenance: create output data product L2 Density")


        contactInfo = ContactInformation()
        contactInfo.individual_names_given = "Bill"
        contactInfo.individual_name_family = "Smith"
        contactInfo.street_address = "111 First St"
        contactInfo.city = "San Diego"
        contactInfo.email = "*****@*****.**"
        contactInfo.phones = ["858-555-6666"]
        contactInfo.country = "USA"
        contactInfo.postal_code = "92123"

        ctd_l2_density_output_dp_obj = IonObject(   RT.DataProduct,
                                                    name='L2_Density',
                                                    description='transform output L2 density',
                                                    contacts = [contactInfo],
                                                    iso_topic_category = "my_iso_topic_category_here",
                                                    quality_control_level = "1",
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj,
                                                                            outgoing_stream_l2_density_id)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L0 all data_process start")
        try:
            input_data_products = [ctd_parsed_data_product]
            output_data_products = [ctd_l0_conductivity_output_dp_id, ctd_l0_pressure_output_dp_id, ctd_l0_temperature_output_dp_id]

            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L0_all_dprocdef_id,
                in_data_product_ids = input_data_products,
                out_data_product_ids = output_data_products
            )
            #activate only this data process just for coverage
            self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        contents = "this is the lookup table  contents, replace with a file..."
        att = IonObject(RT.Attachment, name='deviceLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII)
        deviceAttachment = self.rrclient.create_attachment(ctd_l0_all_data_process_id, att)
        log.info( 'test_createTransformsThenActivateInstrument: InstrumentDevice attachment id = %s', deviceAttachment)

        log.debug("TestDataProductProvenance: create L0 all data_process return")


        #-------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1 Conductivity data_process start")
        try:
            l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L1_conductivity_dprocdef_id,
                in_data_product_ids = [ctd_l0_conductivity_output_dp_id],
                out_data_product_ids = [ctd_l1_conductivity_output_dp_id])
            
            self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)




        #-------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_pressure_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L1_pressure_dprocdef_id,
                in_data_product_ids = [ctd_l0_pressure_output_dp_id],
                out_data_product_ids = [ctd_l1_pressure_output_dp_id])

            self.dataprocessclient.activate_data_process(l1_pressure_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L1_temperature_dprocdef_id,
                in_data_product_ids = [ctd_l0_temperature_output_dp_id],
                out_data_product_ids = [ctd_l1_temperature_output_dp_id])

            self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        #-------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L2_salinity data_process start")
        try:
            l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L2_salinity_dprocdef_id,
                in_data_product_ids = [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id],
                out_data_product_ids = [ctd_l2_salinity_output_dp_id])

            self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # L2 Density: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L2_Density data_process start")
        try:
            in_dp_ids = [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id]
            out_dp_ids = [ctd_l2_density_output_dp_id]

            l2_density_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L2_density_dprocdef_id,
                in_data_product_ids = in_dp_ids,
                out_data_product_ids = out_dp_ids)

            self.dataprocessclient.activate_data_process(l2_density_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)



        #-------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        print 'TestDataProductProvenance: Instrument agent instance obj = ', inst_agent_instance_obj

        # Start a resource agent client to talk with the instrument agent.
#        self._ia_client = ResourceAgentClient('iaclient', name=ResourceAgentClient._get_agent_process_id(instDevice_id), process=FakeProcess())
#        print 'activate_instrument: got ia client %s', self._ia_client
#        log.debug("test_get_provenance: got ia client %s", str(self._ia_client))


        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        self.dataprocessclient.deactivate_data_process(l2_density_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_temperature_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_pressure_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_conductivity_data_process_id)
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)

        #-------------------------------
        # Retrieve the provenance info for the ctd density data product
        #-------------------------------
        provenance_dict = self.dpmsclient.get_data_product_provenance(ctd_l2_density_output_dp_id)
        log.debug("TestDataProductProvenance: provenance_dict  %s", str(provenance_dict))

        #validate that products are represented
        self.assertTrue (provenance_dict[str(ctd_l1_conductivity_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l0_conductivity_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l2_density_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l1_temperature_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l0_temperature_output_dp_id)])

        density_dict = (provenance_dict[str(ctd_l2_density_output_dp_id)])
        self.assertEquals(density_dict['producer'], [l2_density_all_data_process_id])


        #-------------------------------
        # Retrieve the extended resource for this data product
        #-------------------------------
        extended_product = self.dpmsclient.get_data_product_extension(ctd_l2_density_output_dp_id)
        self.assertEqual(1, len(extended_product.data_processes) )
        self.assertEqual(3, len(extended_product.process_input_data_products) )
#        log.debug("TestDataProductProvenance: DataProduct provenance_product_list  %s", str(extended_product.provenance_product_list))
#        log.debug("TestDataProductProvenance: DataProduct data_processes  %s", str(extended_product.data_processes))
#        log.debug("TestDataProductProvenance: DataProduct process_input_data_products  %s", str(extended_product.process_input_data_products))
#        log.debug("TestDataProductProvenance: provenance  %s", str(extended_product.computed.provenance.value))

        #-------------------------------
        # Retrieve the extended resource for this data process
        #-------------------------------
        extended_process_def = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)

#        log.debug("TestDataProductProvenance: DataProcess extended_process_def  %s", str(extended_process_def))
#        log.debug("TestDataProductProvenance: DataProcess data_processes  %s", str(extended_process_def.data_processes))
#        log.debug("TestDataProductProvenance: DataProcess data_products  %s", str(extended_process_def.data_products))
        self.assertEqual(1, len(extended_process_def.data_processes) )
        self.assertEqual(3, len(extended_process_def.output_stream_definitions) )
        self.assertEqual(3, len(extended_process_def.data_products) ) # three output data products from the single data process

        #-------------------------------
        # Request the XML report
        #-------------------------------
        results = self.dpmsclient.get_data_product_provenance_report(ctd_l2_density_output_dp_id)
        print results

        #-------------------------------
        # Cleanup
        #-------------------------------

        self.dpmsclient.delete_data_product(ctd_parsed_data_product)
        self.dpmsclient.delete_data_product(log_data_product_id)
        self.dpmsclient.delete_data_product(ctd_l0_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_salinity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_density_output_dp_id)

Code Example #2

class TestIntDataAcquisitionManagementService(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)

    def tearDown(self):
        pass


    #@unittest.skip('Not done yet.')
    def test_data_source_ops(self):
        # test creating a new data source
        print 'Creating new data source'
        datasource_obj = IonObject(RT.DataSource,
                           name='DataSource1',
                           description='instrument based new source',
                           type='sbe37')
        try:
            ds_id = self.client.create_data_source(datasource_obj)
        except BadRequest as ex:
            self.fail("failed to create new data source: %s" %ex)
        print 'new data source id = ', ds_id


        # test reading a non-existent data source
        print 'reading non-existent data source'
        try:
            dp_obj = self.client.read_data_source('some_fake_id')
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during read: %s" %dp_obj)

        # update a data source (tests read also)
        print 'Updating data source'
        # first get the existing data source object
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass

        # now tweak the object
        datasource_obj.description = 'the very first data source'
        # now write the dp back to the registry
        try:
            update_result = self.client.update_data_source(datasource_obj)
        except NotFound as ex:
            self.fail("existing data source was not found during update")
        except Conflict as ex:
            self.fail("revision conflict exception during data source update")
        #else:
        #    self.assertTrue(update_result == True)
        # now get the data source back to see if it was updated
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass
        self.assertTrue(datasource_obj.description == 'the very first data source')


        # now 'delete' the data source
        print "deleting data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during delete")
        #self.assertTrue(delete_result == True)
        # now try to get the deleted dp object
        try:
            dp_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted data source was found during read")

        # now try to delete the already deleted data source object
        print "deleting non-existing data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during delete")


    #@unittest.skip('Not done yet.')
    def test_register_instrument(self):
        # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

        # test registering a new data producer
        try:
            ds_id = self.client.register_instrument(instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id
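
        # Optional sanity check (not part of the original test): registration
        # should have linked a DataProducer to the instrument, which can be
        # confirmed with the same registry query used in the provenance tests.
        producer_ids, _ = self.rrclient.find_objects(subject=instrument_id,
            predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True)
        self.assertTrue(producer_ids)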


        # test assigning a data product to an instrument, creating the stream for the product
        try:
            self.client.assign_data_product(instrument_id, dataproduct_id, True)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from instrument, deleting the stream for the product
        try:
            self.client.unassign_data_product(instrument_id, dataproduct_id, True)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering a new data producer
        try:
            ds_id = self.client.unregister_instrument(instrument_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)


    def test_register_external_data_set(self):
        # Register an external data set as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial external data set to register
        ext_dataset_obj = IonObject(RT.ExternalDataset, name='DataSet1',description='an external data feed')
        ext_dataset_id, rev = self.rrclient.create(ext_dataset_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new external data set
        try:
            ds_id = self.client.register_external_data_set(ext_dataset_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an ext_dataset_id, creating the stream for the product
        try:
            self.client.assign_data_product(ext_dataset_id, dataproduct_id, True)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from ext_dataset_id, deleting the stream for the product
        try:
            self.client.unassign_data_product(ext_dataset_id, dataproduct_id, True)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering a external data set
        try:
            ds_id = self.client.unregister_external_data_set(ext_dataset_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)



    def test_register_process(self):
        # Register a data process as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial data process to register
        process_obj = IonObject(RT.DataProcess, name='Proc1',description='a data process transform')
        process_id, rev = self.rrclient.create(process_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new process
        try:
            ds_id = self.client.register_process(process_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to a process; no stream is created
        try:
            self.client.assign_data_product(process_id, dataproduct_id, False)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)


        # test UNassigning a data product from the data process (no stream was created, so none is deleted)
        try:
            self.client.unassign_data_product(process_id, dataproduct_id, False)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering a process
        try:
            ds_id = self.client.unregister_process(process_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)


    #@unittest.skip('not ready')
    def test_eoi_resources(self):

            #
            # test creating a new data provider
            #
            print 'Creating new external_data_provider'
            dataprovider_obj = IonObject(RT.ExternalDataProvider,
                               name='ExtDataProvider1',
                               description='external data provider ')
            try:
                dataprovider_id = self.client.create_external_data_provider(dataprovider_obj)
            except BadRequest as ex:
                self.fail("failed to create new data provider: %s" %ex)
            print 'new data provider id = ', dataprovider_id

            #
            # test creating a new data source
            #
            print 'Creating new data source'
            datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='data source ',
                               type='DAP')
            try:
                datasource_id = self.client.create_data_source(datasource_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source: %s" %ex)
            print 'new data source id = ', datasource_id

            #
            # test creating a new data source model
            #
            print 'Creating new data source model'
            datamodel_obj = IonObject(RT.DataSourceModel,
                               name='DataSourceModel1',
                               description='data source model',
                               model='model1')
            try:
                datamodel_id = self.client.create_data_source_model(datamodel_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source model: %s" %ex)
            print 'new data source model id = ', datamodel_id


            #
            # test creating a new external data set
            #
            print 'Creating new external data set'
            dataset_obj = IonObject(RT.ExternalDataset,
                               name='ExternalDataSet1',
                               description='external data set ')
            try:
                extdataset_id = self.client.create_external_dataset(dataset_obj)
            except BadRequest as ex:
                self.fail("failed to create new external data set: %s" %ex)
            print 'new external data set id = ', extdataset_id


            #
            # test creating a new external dataset agent
            #
            print 'Creating new external dataset agent'
            datasetagent_obj = IonObject(RT.ExternalDatasetAgent,
                               name='ExternalDatasetAgent1',
                               description='external data agent ')
            try:
                datasetagent_id = self.client.create_external_dataset_agent(datasetagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent: %s" %ex)
            print 'new external dataset agent id = ', datasetagent_id


            #
            # test creating a new datasource agent
            #
            print 'Creating new data source agent'
            datasourceagent_obj = IonObject(RT.DataSourceAgent,
                               name='DataSourceAgent1',
                               description=' DataSource agent ')
            try:
                datasource_agent_id = self.client.create_data_source_agent(datasourceagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent: %s" %ex)
            print 'new data source agent id = ', datasource_agent_id





            #
            # test creating a new dataset agent instance
            #
            print 'Creating new external dataset agent instance'
            datasetagentinstance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                               name='ExternalDatasetAgentInstance1',
                               description='external dataset agent instance ')
            try:
                datasetagentinstance_id = self.client.create_external_dataset_agent_instance(datasetagentinstance_obj, datasetagent_id)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent instance: %s" %ex)
            print 'new external data agent instance id = ', datasetagentinstance_id

            #
            # test creating a new datasource agent instance
            #
            print 'Creating new data source agent instance'
            datasourceagentinstance_obj = IonObject(RT.DataSourceAgentInstance,
                               name='ExternalDataSourceAgentInstance1',
                               description='external DataSource agent instance ')
            try:
                datasource_agent_instance_id = self.client.create_data_source_agent_instance(datasourceagentinstance_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent instance: %s" %ex)
            print 'new data source agent instance id = ', datasource_agent_instance_id

            #
            # test unassigning the resource associations
            #

            self.client.unassign_data_source_from_external_data_provider(datasource_id, dataprovider_id)

            self.client.unassign_data_source_from_data_model(datasource_id, datamodel_id)

            self.client.unassign_external_dataset_from_data_source(extdataset_id, datasource_id)

            #
            # test read
            #

            try:
                dp_obj = self.client.read_external_data_provider(dataprovider_id)
            except NotFound as ex:
                self.fail("existing data provicer was not found during read")
            else:
                pass

            try:
                dp_obj = self.client.read_data_source(datasource_id)
            except NotFound as ex:
                self.fail("existing data source was not found during read")
            else:
                pass

            #
            # test delete
            #
            try:
                self.client.delete_external_data_provider(dataprovider_id)
                self.client.delete_data_source(datasource_id)
                self.client.delete_external_dataset(extdataset_id)
            except NotFound as ex:
                self.fail("existing data product was not found during delete")


            # test reading a non-existent data provider
            print 'reading non-existent data provider'
            try:
                bad_obj = self.client.read_external_data_provider('some_fake_id')
            except NotFound as ex:
                pass
            else:
                self.fail("non-existing data product was found during read: %s" %bad_obj)
Code Example #3

class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.dataset_management = self.datasetclient

    def test_createDataProcess(self):

        #---------------------------------------------------------------------------
        # Data Process Definition
        #---------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        # Make assertion on the newly registered data process definition
        data_process_definition = self.rrclient.read(dprocdef_id)
        self.assertEquals(data_process_definition.name, 'ctd_L0_all')
        self.assertEquals(data_process_definition.description, 'transform ctd package into three separate L0 streams')
        self.assertEquals(data_process_definition.module, 'ion.processes.data.transforms.ctd.ctd_L0_all')
        self.assertEquals(data_process_definition.class_name, 'ctd_L0_all')

        # Read the data process definition using data process management and make assertions
        dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id)
        self.assertEquals(dprocdef_obj.class_name,'ctd_L0_all')
        self.assertEquals(dprocdef_obj.module,'ion.processes.data.transforms.ctd.ctd_L0_all')

        #---------------------------------------------------------------------------
        # Create an input instrument
        #---------------------------------------------------------------------------

        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        # Register the instrument so that the data producer and stream object are created
        data_producer_id = self.damsclient.register_instrument(instrument_id)

        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id )

        # Assert that the stream definition was actually linked to the data process definition
        assocs = self.rrclient.find_associations(subject=dprocdef_id, predicate=PRED.hasInputStreamDefinition, object=ctd_stream_def_id, id_only=True)

        # find_associations returns a list (never None), so assert it is non-empty
        self.assertTrue(assocs)

        #---------------------------------------------------------------------------
        # Input Data Product
        #---------------------------------------------------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        input_dp_obj = IonObject(   RT.DataProduct,
                                    name='InputDataProduct',
                                    description='some new dp',
                                    temporal_domain = tdom,
                                    spatial_domain = sdom)

        input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=ctd_stream_def_id, exchange_point='test')
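        # Note: create_data_product is assumed to create the backing stream from
        # the given stream definition, with 'test' naming the exchange point the
        # stream publishes to (an assumption based on the parameter names).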

        #Make assertions on the input data product created
        input_dp_obj = self.rrclient.read(input_dp_id)
        self.assertEquals(input_dp_obj.name, 'InputDataProduct')
        self.assertEquals(input_dp_obj.description, 'some new dp')

        self.damsclient.assign_data_product(instrument_id, input_dp_id)

        # Retrieve the stream via the DataProduct->Stream associations
        stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True)

        self.in_stream_id = stream_ids[0]
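        # find_objects(..., True) returns an (ids, associations) tuple, so
        # stream_ids is the list of stream ids; this assumes the input data
        # product has exactly one associated stream.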

        #---------------------------------------------------------------------------
        # Output Data Product
        #---------------------------------------------------------------------------

        outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(name='conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id,binding='conductivity' )

        outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(name='pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id, binding='pressure' )

        outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(name='temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id, binding='temperature' )


        self.output_products={}

        output_dp_obj = IonObject(RT.DataProduct,
            name='conductivity',
            description='transform output conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id)
        self.output_products['conductivity'] = output_dp_id_1

        output_dp_obj = IonObject(RT.DataProduct,
            name='pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id)
        self.output_products['pressure'] = output_dp_id_2

        output_dp_obj = IonObject(RT.DataProduct,
            name='temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id)
        self.output_products['temperature'] = output_dp_id_3


        #---------------------------------------------------------------------------
        # Create the data process
        #---------------------------------------------------------------------------
        def _create_data_process():
            dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products)
            return dproc_id

        dproc_id = _create_data_process()

        # Make assertions on the data process created
        data_process = self.dataprocessclient.read_data_process(dproc_id)

        # Assert that the data process has a process id attached
        self.assertIsNotNone(data_process.process_id)

        # Assert that the data process got the input data product's subscription id attached as its own input_subscription_id attribute
        self.assertIsNotNone(data_process.input_subscription_id)

        output_data_product_ids = self.rrclient.find_objects(subject=dproc_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=True)

        self.assertEquals(set(output_data_product_ids[0]), set([output_dp_id_1, output_dp_id_2, output_dp_id_3]))


    @patch.dict(CFG, {'endpoint':{'receive':{'timeout': 60}}})
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", driver_class="SBE37Driver" )
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------


        port_agent_config = {
            'device_addr': 'sbe37-simulator.oceanobservatories.org',
            'device_port': 4001,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'command_port': 4002,
            'data_port': 4003,
            'log_level': 5,
        }
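        # The port agent bridges the instrument's TCP connection to the agent:
        # device_addr/device_port point at the SBE37 simulator, while
        # command_port/data_port are the local ports the port agent itself
        # listens on (these values are assumed test defaults).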

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", svr_addr="localhost",
                                          comms_device_address=CFG.device.sbe37.host, comms_device_port=CFG.device.sbe37.port,
                                          port_agent_config = port_agent_config)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()



        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id)

        dp_obj.name = 'ctd_raw'
        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add this validation for Req: L4-CI-SA-RQ-367  Data processing shall notify registered data product consumers about data processing workflow life cycle events
        # todo (contd): I believe the capability does not exist yet; ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu)
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)

        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: add this validation for Req: L4-CI-SA-RQ-366  Data processing shall manage data topic definitions
        # todo: This capability is not yet completed (Swarbhanu)
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        # todo: check that an activate event is received (L4-CI-SA-RQ-367). It looks like no
        # todo: event is published when the data process is activated, so below we just
        # todo: check that the subscription is indeed activated. (Swarbhanu)


        # todo: monitor process to see if it is active (sa-rq-182)
        ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        input_subscription_id = ctd_l0_all_data_process.input_subscription_id
        subs = self.rrclient.read(input_subscription_id)
        self.assertTrue(subs.activated)

        # todo: This has not yet been completed by CEI; it will probably surface through a DPMS call
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)


        #-------------------------------
        # Retrieve the extended resources for data process definition and for data process
        #-------------------------------
        extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
        self.assertEqual(1, len(extended_process_definition.data_processes))
        log.debug("test_createDataProcess: extended_process_definition  %s", str(extended_process_definition))

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEqual(1, len(extended_process.input_data_products))
        log.debug("test_createDataProcess: extended_process  %s", str(extended_process))

        #-------------------------------
        # Cleanup
        #-------------------------------

        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)
        self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)

        self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)
        self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)
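
The tests above repeat one wiring pattern: define a transform, attach one stream definition per output binding, create a matching output product per binding, then create and activate the process. A condensed sketch of that pattern follows; the `wire_transform` helper and the `clients` holder are hypothetical, the service calls mirror those used above, and IonObject/RT are assumed imported as in the tests.

def wire_transform(clients, dprocdef_id, pdict_id, input_dp_id, bindings):
    # For each binding: create a stream definition, attach it to the data
    # process definition under that binding name, and create a matching
    # output data product on that stream definition.
    output_products = {}
    for binding in bindings:
        stream_def_id = clients.pubsub.create_stream_definition(
            name=binding, parameter_dictionary_id=pdict_id)
        clients.dataprocess.assign_stream_definition_to_data_process_definition(
            stream_def_id, dprocdef_id, binding=binding)
        # (temporal/spatial domains omitted here; the tests above supply them)
        dp_obj = IonObject(RT.DataProduct, name=binding,
                           description='transform output ' + binding)
        output_products[binding] = clients.dataproduct.create_data_product(
            dp_obj, stream_def_id)
    # A single process instance consumes the input product and feeds all outputs.
    dproc_id = clients.dataprocess.create_data_process(
        dprocdef_id, [input_dp_id], output_products)
    clients.dataprocess.activate_data_process(dproc_id)
    return dproc_id, output_products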
Code Example #4
class TestOmsLaunch(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()


        self.platformModel_id = None

        # rsn_oms: to retrieve network structure and information from RSN-OMS:
        # Note that OmsClientFactory will create an "embedded" RSN OMS
        # simulator object by default.
        self.rsn_oms = OmsClientFactory.create_instance()

        self.all_platforms = {}
        self.topology = {}
        self.agent_device_map = {}
        self.agent_streamconfig_map = {}

        self._async_data_result = AsyncResult()
        self._data_subscribers = []
        self._samples_received = []
        self.addCleanup(self._stop_data_subscribers)

        self._async_event_result = AsyncResult()
        self._event_subscribers = []
        self._events_received = []
        self.addCleanup(self._stop_event_subscribers)
        self._start_event_subscriber()

        self._set_up_DataProduct_obj()
        self._set_up_PlatformModel_obj()

    def _set_up_DataProduct_obj(self):
        # Create data product object to be used for each of the platform log streams
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('platform_eng_parsed', id_only=True)
        self.platform_eng_stream_def_id = self.pubsubcli.create_stream_definition(
            name='platform_eng', parameter_dictionary_id=pdict_id)
        self.dp_obj = IonObject(RT.DataProduct,
            name='platform_eng data',
            description='platform_eng test',
            temporal_domain = tdom,
            spatial_domain = sdom)

    def _set_up_PlatformModel_obj(self):
        # Create PlatformModel
        platformModel_obj = IonObject(RT.PlatformModel,
                                      name='RSNPlatformModel',
                                      description="RSNPlatformModel")
        try:
            self.platformModel_id = self.imsclient.create_platform_model(platformModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new PLatformModel: %s" %ex)
        log.debug( 'new PlatformModel id = %s', self.platformModel_id)

    def _traverse(self, platform_id, parent_platform_objs=None):
        """
        Recursive routine that repeatedly calls _prepare_platform to build
        the object dictionary for each platform.

        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
                        platform, if any.

        @retval the dict returned by _prepare_platform at this level.
        """

        log.info("Starting _traverse for %r", platform_id)

        plat_objs = self._prepare_platform(platform_id, parent_platform_objs)

        self.all_platforms[platform_id] = plat_objs

        # now, traverse the children:
        retval = self.rsn_oms.getSubplatformIDs(platform_id)
        subplatform_ids = retval[platform_id]
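        # The recursion bottoms out when getSubplatformIDs returns an empty
        # list for a leaf platform (assumed behavior of the RSN-OMS client).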
        for subplatform_id in subplatform_ids:
            self._traverse(subplatform_id, plat_objs)

        # note, topology indexed by platform_id
        self.topology[platform_id] = plat_objs['children']

        return plat_objs

    def _prepare_platform(self, platform_id, parent_platform_objs):
        """
        This routine generalizes the manual construction currently done in
        test_oms_launch.py. It is called by the recursive _traverse method so
        all platforms starting from a given base platform are prepared.

        Note: For simplicity in this test, sites are organized in the same
        hierarchical way as the platforms themselves.

        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
                        platform, if any.

        @retval a dict of associated objects similar to those in
                test_oms_launch
        """

        site__obj = IonObject(RT.PlatformSite,
                            name='%s_PlatformSite' % platform_id,
                            description='%s_PlatformSite platform site' % platform_id)

        site_id = self.omsclient.create_platform_site(site__obj)

        if parent_platform_objs:
            # establish hasSite association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['site_id'],
                predicate=PRED.hasSite,
                object=site_id)

        # prepare platform attributes and ports:
        monitor_attributes = self._prepare_platform_attributes(platform_id)
        ports =              self._prepare_platform_ports(platform_id)

        device__obj = IonObject(RT.PlatformDevice,
                        name='%s_PlatformDevice' % platform_id,
                        description='%s_PlatformDevice platform device' % platform_id,
                        ports=ports,
                        platform_monitor_attributes = monitor_attributes)

        device_id = self.imsclient.create_platform_device(device__obj)

        self.imsclient.assign_platform_model_to_platform_device(self.platformModel_id, device_id)
        self.rrclient.create_association(subject=site_id, predicate=PRED.hasDevice, object=device_id)
        self.damsclient.register_instrument(instrument_id=device_id)


        if parent_platform_objs:
            # establish hasDevice association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['device_id'],
                predicate=PRED.hasDevice,
                object=device_id)

        agent__obj = IonObject(RT.PlatformAgent,
                            name='%s_PlatformAgent' % platform_id,
                            description='%s_PlatformAgent platform agent' % platform_id)

        agent_id = self.imsclient.create_platform_agent(agent__obj)

        if parent_platform_objs:
            # add this platform_id to parent's children:
            parent_platform_objs['children'].append(platform_id)


        self.imsclient.assign_platform_model_to_platform_agent(self.platformModel_id, agent_id)

#        agent_instance_obj = IonObject(RT.PlatformAgentInstance,
#                                name='%s_PlatformAgentInstance' % platform_id,
#                                description="%s_PlatformAgentInstance" % platform_id)
#
#        agent_instance_id = self.imsclient.create_platform_agent_instance(
#                            agent_instance_obj, agent_id, device_id)

        plat_objs = {
            'platform_id':        platform_id,
            'site__obj':          site__obj,
            'site_id':            site_id,
            'device__obj':        device__obj,
            'device_id':          device_id,
            'agent__obj':         agent__obj,
            'agent_id':           agent_id,
#            'agent_instance_obj': agent_instance_obj,
#            'agent_instance_id':  agent_instance_id,
            'children':           []
        }

        log.info("plat_objs for platform_id %r = %s", platform_id, str(plat_objs))

        self.agent_device_map[platform_id] = device__obj

        stream_config = self._create_stream_config(plat_objs)
        self.agent_streamconfig_map[platform_id] = stream_config
#        self._start_data_subscriber(agent_instance_id, stream_config)

        return plat_objs

    def _prepare_platform_attributes(self, platform_id):
        """
        Returns the list of PlatformMonitorAttributes objects corresponding to
        the attributes associated to the given platform.
        """
        result = self.rsn_oms.getPlatformAttributes(platform_id)
        self.assertTrue(platform_id in result)
        ret_infos = result[platform_id]

        monitor_attributes = []
        for attrName, attrDfn in ret_infos.iteritems():
            log.debug("platform_id=%r: preparing attribute=%r", platform_id, attrName)

            monitor_rate = attrDfn['monitorCycleSeconds']
            units =        attrDfn['units']

            plat_attr_obj = IonObject(OT.PlatformMonitorAttributes,
                                      id=attrName,
                                      monitor_rate=monitor_rate,
                                      units=units)

            monitor_attributes.append(plat_attr_obj)

        return monitor_attributes

    def _prepare_platform_ports(self, platform_id):
        """
        Returns the list of PlatformPort objects corresponding to the ports
        associated to the given platform.
        """
        result = self.rsn_oms.getPlatformPorts(platform_id)
        self.assertTrue(platform_id in result)
        port_dict = result[platform_id]

        ports = []
        for port_id, port in port_dict.iteritems():
            log.debug("platform_id=%r: preparing port=%r", platform_id, port_id)
            ip_address = port['comms']['ip']
            plat_port_obj = IonObject(OT.PlatformPort,
                                      port_id=port_id,
                                      ip_address=ip_address)

            ports.append(plat_port_obj)

        return ports

    def _create_stream_config(self, plat_objs):

        platform_id = plat_objs['platform_id']
        device_id =   plat_objs['device_id']


        #create the log data product
        self.dp_obj.name = '%s platform_eng data' % platform_id
        data_product_id = self.dpclient.create_data_product(data_product=self.dp_obj, stream_definition_id=self.platform_eng_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=device_id, data_product_id=data_product_id)
        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id, PRED.hasStream, None, True)

        stream_config = self._build_stream_config(stream_ids[0])
        return stream_config

    def _build_stream_config(self, stream_id=''):

        platform_eng_dictionary = DatasetManagementService.get_parameter_dictionary_by_name('platform_eng_parsed')

        #get the streamroute object from pubsub by passing the stream_id
        stream_def_ids, _ = self.rrclient.find_objects(stream_id,
            PRED.hasStreamDefinition,
            RT.StreamDefinition,
            True)


        stream_route = self.pubsubcli.read_stream_route(stream_id=stream_id)
        stream_config = {'routing_key' : stream_route.routing_key,
                         'stream_id' : stream_id,
                         'stream_definition_ref' : stream_def_ids[0],
                         'exchange_point' : stream_route.exchange_point,
                         'parameter_dictionary':platform_eng_dictionary.dump()}
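        # This dict is presumably what the platform agent uses to set up its
        # publisher: routing_key plus exchange_point locate the AMQP route, and
        # the dumped parameter dictionary describes how to build data granules.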

        return stream_config

    def _set_platform_agent_instances(self):
        """
        Once most of the objs/defs associated with all platforms are in
        place, this method creates and associates the PlatformAgentInstance
        elements.
        """

        self.platform_configs = {}
        for platform_id, plat_objs in self.all_platforms.iteritems():

            PLATFORM_CONFIG = self.platform_configs[platform_id] = {
                'platform_id':             platform_id,
                'platform_topology':       self.topology,

#                'agent_device_map':        self.agent_device_map,
                'agent_streamconfig_map':  self.agent_streamconfig_map,

                'driver_config':           DVR_CONFIG,
            }

            agent_config = {
                'platform_config': PLATFORM_CONFIG,
            }

            agent_instance_obj = IonObject(RT.PlatformAgentInstance,
                                    name='%s_PlatformAgentInstance' % platform_id,
                                    description="%s_PlatformAgentInstance" % platform_id,
                                    agent_config=agent_config)

            agent_id = plat_objs['agent_id']
            device_id = plat_objs['device_id']
            agent_instance_id = self.imsclient.create_platform_agent_instance(
                                agent_instance_obj, agent_id, device_id)

            plat_objs['agent_instance_obj'] = agent_instance_obj
            plat_objs['agent_instance_id']  = agent_instance_id


            stream_config = self.agent_streamconfig_map[platform_id]
            self._start_data_subscriber(agent_instance_id, stream_config)


    def _start_data_subscriber(self, stream_name, stream_config):
        """
        Starts data subscriber for the given stream_name and stream_config
        """

        def consume_data(message, stream_route, stream_id):
            # A callback for processing subscribed-to data.
            log.info('Subscriber received data message: %s.', str(message))
            self._samples_received.append(message)
            self._async_data_result.set()

        log.info('_start_data_subscriber stream_name=%r', stream_name)

        stream_id = stream_config['stream_id']

        # Create subscription for the stream
        exchange_name = '%s_queue' % stream_name
        self.container.ex_manager.create_xn_queue(exchange_name).purge()
        sub = StandaloneStreamSubscriber(exchange_name, consume_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = self.pubsubcli.create_subscription(name=exchange_name, stream_ids=[stream_id])
        self.pubsubcli.activate_subscription(sub_id)
        sub.subscription_id = sub_id
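        # Stash the subscription id on the subscriber so _stop_data_subscribers
        # can deactivate and delete it during cleanup.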

    def _stop_data_subscribers(self):
        """
        Stop the data subscribers on cleanup.
        """
        try:
            for sub in self._data_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        self.pubsubcli.deactivate_subscription(sub.subscription_id)
                    except:
                        pass
                    self.pubsubcli.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._data_subscribers = []

    def _start_event_subscriber(self, event_type="PlatformAlarmEvent", sub_type="power"):
        """
        Starts event subscriber for events of given event_type ("PlatformAlarmEvent"
        by default) and given sub_type ("power" by default).
        """
        # TODO note: ion-definitions still using 'PlatformAlarmEvent' but we
        # should probably define 'PlatformExternalEvent' or something like that.

        def consume_event(evt, *args, **kwargs):
            # A callback for consuming events.
            log.info('Event subscriber received evt: %s.', str(evt))
            self._events_received.append(evt)
            self._async_event_result.set(evt)

        sub = EventSubscriber(event_type=event_type,
                              sub_type=sub_type,
                              callback=consume_event)

        sub.start()
        log.info("registered event subscriber for event_type=%r, sub_type=%r",
                 event_type, sub_type)

        self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)
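        # Block until the subscriber is ready to deliver events; note this
        # relies on EventSubscriber's private _ready_event attribute.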

    def _stop_event_subscribers(self):
        """
        Stops the event subscribers on cleanup.
        """
        try:
            for sub in self._event_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        self.pubsubcli.deactivate_subscription(sub.subscription_id)
                    except:
                        pass
                    self.pubsubcli.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._event_subscribers = []

    def test_oms_create_and_launch(self):

        # pick a base platform:
        base_platform_id = BASE_PLATFORM_ID

        # and trigger the traversal of the branch rooted at that base platform
        # to create corresponding ION objects and configuration dictionaries:
        base_platform_objs = self._traverse(base_platform_id)

        # now that most of the topology information is there, add the
        # PlatformAgentInstance elements
        self._set_platform_agent_instances()

        base_platform_config = self.platform_configs[base_platform_id]

        log.info("base_platform_id = %r", base_platform_id)
        log.info("topology = %s", str(self.topology))


        #-------------------------------
        # Launch Base Platform AgentInstance, connect to the resource agent client
        #-------------------------------

        agent_instance_id = base_platform_objs['agent_instance_id']
        pid = self.imsclient.start_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        #wait for start
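        # (ProcessStateGate is understood to watch the process via the given
        # read_process callable until it reaches RUNNING or the wait times out.)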
        instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
        gate = ProcessStateGate(self.processdispatchclient.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(90), "The platform agent instance did not spawn in 90 seconds")

        agent_instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
        log.debug('test_oms_create_and_launch: Platform agent instance obj: %s', str(agent_instance_obj))

        # Start a resource agent client to talk with the platform agent.
        self._pa_client = ResourceAgentClient('paclient', name=agent_instance_obj.agent_process_id,  process=FakeProcess())
        log.debug(" test_oms_create_and_launch:: got pa client %s", str(self._pa_client))

        log.debug("base_platform_config =\n%s", base_platform_config)

        # ping_agent can be issued before INITIALIZE
        retval = self._pa_client.ping_agent(timeout=TIMEOUT)
        log.debug( 'Base Platform ping_agent = %s', str(retval) )

        # issue INITIALIZE command to the base platform, which will launch the
        # creation of the whole platform hierarchy rooted at base_platform_config['platform_id']
#        cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE, kwargs=dict(plat_config=base_platform_config))
        cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug( 'Base Platform INITIALIZE = %s', str(retval) )


        # GO_ACTIVE
        cmd = AgentCommand(command=PlatformAgentEvent.GO_ACTIVE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug( 'Base Platform GO_ACTIVE = %s', str(retval) )

        # RUN: this command includes the launch of the resource monitoring greenlets
        cmd = AgentCommand(command=PlatformAgentEvent.RUN)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug( 'Base Platform RUN = %s', str(retval) )

        # START_EVENT_DISPATCH
        kwargs = dict(params="TODO set params")
        cmd = AgentCommand(command=PlatformAgentEvent.START_EVENT_DISPATCH, kwargs=kwargs)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        self.assertTrue(retval.result is not None)

        # wait for data sample
        # just wait for at least one -- see consume_data above
        log.info("waiting for reception of a data sample...")
        self._async_data_result.get(timeout=DATA_TIMEOUT)
        self.assertTrue(len(self._samples_received) >= 1)

        log.info("waiting a bit more for reception of more data samples...")
        sleep(10)
        log.info("Got data samples: %d", len(self._samples_received))


        # wait for event
        # just wait for at least one event -- see consume_event above
        log.info("waiting for reception of an event...")
        self._async_event_result.get(timeout=EVENT_TIMEOUT)
        log.info("Received events: %s", len(self._events_received))


        # STOP_EVENT_DISPATCH
        cmd = AgentCommand(command=PlatformAgentEvent.STOP_EVENT_DISPATCH)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        self.assertTrue(retval.result is not None)

        # GO_INACTIVE
        cmd = AgentCommand(command=PlatformAgentEvent.GO_INACTIVE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug( 'Base Platform GO_INACTIVE = %s', str(retval) )

        # RESET: Resets the base platform agent, which includes termination of
        # its sub-platforms processes:
        cmd = AgentCommand(command=PlatformAgentEvent.RESET)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug( 'Base Platform RESET = %s', str(retval) )

        #-------------------------------
        # Stop Base Platform AgentInstance
        #-------------------------------
        self.imsclient.stop_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
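
The launch sequence above follows the platform agent's command lifecycle: INITIALIZE, GO_ACTIVE, RUN on the way up, then GO_INACTIVE and RESET on the way down. A minimal sketch of that sequence as a reusable helper; the `run_agent_lifecycle` name is hypothetical, while the AgentCommand/PlatformAgentEvent usage mirrors the test above.

def run_agent_lifecycle(pa_client, timeout):
    # Drive a platform agent up through its canonical startup states; each
    # execute_agent call blocks until the FSM transition completes.
    for event in (PlatformAgentEvent.INITIALIZE,
                  PlatformAgentEvent.GO_ACTIVE,
                  PlatformAgentEvent.RUN):
        retval = pa_client.execute_agent(AgentCommand(command=event), timeout=timeout)
        log.debug('%s -> %s', event, str(retval))
    # ... exercise the running agent here (data/event dispatch, sampling) ...
    for event in (PlatformAgentEvent.GO_INACTIVE,
                  PlatformAgentEvent.RESET):
        pa_client.execute_agent(AgentCommand(command=event), timeout=timeout)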
Code Example #5
class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.dataset_management = self.datasetclient
        self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)

    def test_createDataProcess(self):

        #---------------------------------------------------------------------------
        # Data Process Definition
        #---------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        # Make assertion on the newly registered data process definition
        data_process_definition = self.rrclient.read(dprocdef_id)
        self.assertEquals(data_process_definition.name, 'ctd_L0_all')
        self.assertEquals(data_process_definition.description, 'transform ctd package into three separate L0 streams')
        self.assertEquals(data_process_definition.module, 'ion.processes.data.transforms.ctd.ctd_L0_all')
        self.assertEquals(data_process_definition.class_name, 'ctd_L0_all')

        # Read the data process definition using data process management and make assertions
        dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id)
        self.assertEquals(dprocdef_obj.class_name,'ctd_L0_all')
        self.assertEquals(dprocdef_obj.module,'ion.processes.data.transforms.ctd.ctd_L0_all')

        #---------------------------------------------------------------------------
        # Create an input instrument
        #---------------------------------------------------------------------------

        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        # Register the instrument so that the data producer and stream object are created
        data_producer_id = self.damsclient.register_instrument(instrument_id)

        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id )

        # Assert that the stream definition was actually linked to the data process definition
        assocs = self.rrclient.find_associations(subject=dprocdef_id, predicate=PRED.hasInputStreamDefinition, object=ctd_stream_def_id, id_only=True)

        # find_associations returns a list (never None), so assert it is non-empty
        self.assertTrue(assocs)

        #---------------------------------------------------------------------------
        # Input Data Product
        #---------------------------------------------------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        input_dp_obj = IonObject(   RT.DataProduct,
                                    name='InputDataProduct',
                                    description='some new dp',
                                    temporal_domain = tdom,
                                    spatial_domain = sdom)

        input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=ctd_stream_def_id, exchange_point='test')

        #Make assertions on the input data product created
        input_dp_obj = self.rrclient.read(input_dp_id)
        self.assertEquals(input_dp_obj.name, 'InputDataProduct')
        self.assertEquals(input_dp_obj.description, 'some new dp')

        self.damsclient.assign_data_product(instrument_id, input_dp_id)

        # Retrieve the stream via the DataProduct->Stream associations
        stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True)

        self.in_stream_id = stream_ids[0]

        #---------------------------------------------------------------------------
        # Output Data Product
        #---------------------------------------------------------------------------

        outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(name='conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id,binding='conductivity' )

        outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(name='pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id, binding='pressure' )

        outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(name='temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id, binding='temperature' )


        self.output_products={}

        output_dp_obj = IonObject(RT.DataProduct,
            name='conductivity',
            description='transform output conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id)
        self.output_products['conductivity'] = output_dp_id_1

        output_dp_obj = IonObject(RT.DataProduct,
            name='pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id)
        self.output_products['pressure'] = output_dp_id_2

        output_dp_obj = IonObject(RT.DataProduct,
            name='temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id)
        self.output_products['temperature'] = output_dp_id_3


        #---------------------------------------------------------------------------
        # Create the data process
        #---------------------------------------------------------------------------
        def _create_data_process():
            dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products)
            return dproc_id

        dproc_id = _create_data_process()

        # Make assertions on the data process created
        data_process = self.dataprocessclient.read_data_process(dproc_id)

        # Assert that the data process has a process id attached
        self.assertIsNotNone(data_process.process_id)

        # Assert that the data process got the input data product's subscription id attached as its own input_subscription_id attribute
        self.assertIsNotNone(data_process.input_subscription_id)

        output_data_product_ids = self.rrclient.find_objects(subject=dproc_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=True)

        self.assertEquals(set(output_data_product_ids[0]), set([output_dp_id_1, output_dp_id_2, output_dp_id_3]))


    @patch.dict(CFG, {'endpoint':{'receive':{'timeout': 60}}})
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg")
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------


        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }
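        # This ETHERNET-type config draws the device host/ports from
        # CFG.device.sbe37, so the target device is configurable per deployment.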

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()



        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id)

        dp_obj.name = 'ctd_raw'
        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add this validation for Req: L4-CI-SA-RQ-367  Data processing shall notify registered data product consumers about data processing workflow life cycle events
        # todo (contd): I believe the capability does not exist yet; ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu)
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------

        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
        data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        process_id = data_process.process_id
        self.addCleanup(self.process_dispatcher.cancel_process, process_id)
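        # addCleanup guarantees the transform process is cancelled even if a
        # later assertion fails.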

        #-------------------------------
        # Wait until the process launched in the create_data_process() method is actually running, before proceeding further in this test
        #-------------------------------
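        # (ProcessStateGate is assumed to watch the process via read_process
        # until it reaches RUNNING or the wait times out.)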

        gate = ProcessStateGate(self.process_dispatcher.read_process, process_id, ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The data process (%s) did not spawn in 30 seconds" % process_id)

        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: Req: L4-CI-SA-RQ-366  Data processing shall manage data topic definitions
        # todo: data topics are being handled by pub sub at the level of streams
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        #todo: check that activate event is received L4-CI-SA-RQ-367
        #todo (contd): no event appears to be published when the data process is activated,
        #todo (contd): so for now we just check below that the subscription is indeed activated (Swarbhanu)


        # todo: monitor process to see if it is active (sa-rq-182)
        ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        input_subscription_id = ctd_l0_all_data_process.input_subscription_id
        subs = self.rrclient.read(input_subscription_id)
        self.assertTrue(subs.activated)

        # todo: This has not yet been completed by CEI; it will probably surface through a DPMS call
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)


        #-------------------------------
        # Retrieve the extended resources for data process definition and for data process
        #-------------------------------
        extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
        self.assertEqual(1, len(extended_process_definition.data_processes))
        log.debug("test_createDataProcess: extended_process_definition  %s", str(extended_process_definition))

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEqual(1, len(extended_process.input_data_products))
        log.debug("test_createDataProcess: extended_process  %s", str(extended_process))

        ################################ Test the removal of data processes ##################################

        #-------------------------------------------------------------------
        # Test the deleting of the data process
        #-------------------------------------------------------------------

        # Before deleting, get the input streams, output streams and the subscriptions so that they can be checked after deleting
#        dp_obj_1 = self.rrclient.read(ctd_l0_all_data_process_id)
#        input_subscription_id = dp_obj_1.input_subscription_id
#        out_prods, _ = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
#        in_prods, _ = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasInputProduct, id_only=True)
#        in_streams = []
#        for in_prod in in_prods:
#            streams, _ = self.rrclient.find_objects(in_prod, PRED.hasStream, id_only=True)
#            in_streams.extend(streams)
#        out_streams = []
#        for out_prod in out_prods:
#            streams, _ = self.rrclient.find_objects(out_prod, PRED.hasStream, id_only=True)
#            out_streams.extend(streams)

        # Deleting the data process
        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)

        # Check that the data process was retired: its lcstate should now be RETIRED
        dp_obj = self.rrclient.read(ctd_l0_all_data_process_id)
        self.assertEquals(dp_obj.lcstate, LCS.RETIRED)

        # Check for process defs still attached to the data process
        dpd_assn_ids = self.rrclient.find_associations(subject=ctd_l0_all_data_process_id,  predicate=PRED.hasProcessDefinition, id_only=True)
        self.assertEquals(len(dpd_assn_ids), 0)

        # Check for output data product still attached to the data process
        out_products, assocs = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
        self.assertEquals(len(out_products), 0)
        self.assertEquals(len(assocs), 0)

        # Check for input data products still attached to the data process
        inprod_associations = self.rrclient.find_associations(ctd_l0_all_data_process_id, PRED.hasInputProduct)
        self.assertEquals(len(inprod_associations), 0)

        # Check that the data process has been deactivated
        self.assertIsNone(dp_obj.input_subscription_id)

        # Read the original subscription id of the data process and check that the subscription has been deleted
        with self.assertRaises(NotFound):
            self.pubsubclient.read_subscription(input_subscription_id)

        #-------------------------------------------------------------------
        # Delete the data process definition
        #-------------------------------------------------------------------

        # before deleting, look up the associated process definition so we can verify later that the association is removed as expected
        proc_def_ids, proc_def_asocs = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasProcessDefinition)
        self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # check that the data process definition has been retired
        dp_proc_def = self.rrclient.read(ctd_L0_all_dprocdef_id)
        self.assertEquals(dp_proc_def.lcstate, LCS.RETIRED)

        # Check for old associations of this data process definition
        proc_defs, proc_def_asocs = self.rrclient.find_objects(ctd_L0_all_dprocdef_id, PRED.hasProcessDefinition)
        self.assertEquals(len(proc_defs), 0)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject= ctd_L0_all_dprocdef_id, id_only=True)
        self.assertEquals(len(obj_assns), 0)

        ################################ Test the force removal of data processes ##################################
        # Try force delete... This should simply delete the associations and the data process object
        # from the resource registry

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)
        
        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process definition
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)

        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)
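
# A minimal, self-contained sketch of the retire-vs-force-delete semantics the
# test above exercises. _Resource and _FakeRegistry are illustrative stand-ins,
# not the real pyon resource registry API.
class _Resource(object):
    def __init__(self, name):
        self.name = name
        self.lcstate = 'DEPLOYED'

class _FakeRegistry(object):
    def __init__(self):
        self._store = {}

    def create(self, res_id, res):
        self._store[res_id] = res

    def read(self, res_id):
        if res_id not in self._store:
            raise KeyError('NotFound: %s' % res_id)  # stands in for NotFound
        return self._store[res_id]

    def retire(self, res_id):
        # a plain delete_* call above only retires: the object stays readable
        self.read(res_id).lcstate = 'RETIRED'

    def force_delete(self, res_id):
        # force_delete_* removes the object and its associations outright
        del self._store[res_id]

_rr = _FakeRegistry()
_rr.create('dp1', _Resource('data process'))
_rr.retire('dp1')
assert _rr.read('dp1').lcstate == 'RETIRED'  # retired, but still present
_rr.force_delete('dp1')
try:
    _rr.read('dp1')
except KeyError:
    pass  # gone for good, as the NotFound assertions above expect
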
class TestIntDataAcquisitionManagementService(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)

    def tearDown(self):
        pass


    #@unittest.skip('Not done yet.')
    def test_data_source_ops(self):
        # test creating a new data source
        print 'Creating new data source'
        datasource_obj = IonObject(RT.DataSource,
                           name='DataSource1',
                           description='instrument based new source' ,
                            type='sbe37')
        try:
            ds_id = self.client.create_data_source(datasource_obj)
        except BadRequest as ex:
            self.fail("failed to create new data source: %s" %ex)
        print 'new data source id = ', ds_id


        # test reading a non-existent data source
        print 'reading non-existent data source'
        try:
            dp_obj = self.client.read_data_source('some_fake_id')
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during read: %s" %dp_obj)

        # update a data source (tests read also)
        print 'Updating data source'
        # first get the existing data source object
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass

        # now tweak the object
        datasource_obj.description = 'the very first data source'
        # now write the dp back to the registry
        try:
            update_result = self.client.update_data_source(datasource_obj)
        except NotFound as ex:
            self.fail("existing data source was not found during update")
        except Conflict as ex:
            self.fail("revision conflict exception during data source update")
        #else:
        #    self.assertTrue(update_result == True)
        # now get the data source back to see if it was updated
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass
        self.assertTrue(datasource_obj.description == 'the very first data source')


        # now 'delete' the data source
        print "deleting data source"
        try:
            delete_result = self.client.force_delete_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during delete")
        #self.assertTrue(delete_result == True)
        # now try to get the deleted dp object
        try:
            dp_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted data source was found during read")

        # now try to delete the already deleted data source object
        print "deleting non-existing data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during delete")


    #@unittest.skip('Not done yet.')
    def test_register_instrument(self):
        # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

        # test registering a new data producer
        try:
            ds_id = self.client.register_instrument(instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id


        # test assigning a data product to an instrument, creating the stream for the product
        try:
            self.client.assign_data_product(instrument_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from instrument, deleting the stream for the product
        try:
            self.client.unassign_data_product(instrument_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to failed to UNassign data product to data producer data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to failed to UNassign data product to data producer data producer: %s" %ex)

        # test UNregistering a new data producer
        try:
            ds_id = self.client.unregister_instrument(instrument_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)


    def test_register_external_data_set(self):
        # Register an external data set as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        ext_dataset_obj = IonObject(RT.ExternalDataset, name='DataSet1',description='an external data feed')
        ext_dataset_id, rev = self.rrclient.create(ext_dataset_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new external data set
        try:
            ds_id = self.client.register_external_data_set(ext_dataset_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an ext_dataset_id, creating the stream for the product
        try:
            self.client.assign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from ext_dataset_id, deleting the stream for the product
        try:
            self.client.unassign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to failed to UNassign data product to data producer data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to failed to UNassign data product to data producer data producer: %s" %ex)

        # test UNregistering an external data set
        try:
            ds_id = self.client.unregister_external_data_set(ext_dataset_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)





    #@unittest.skip('not ready')
    def test_eoi_resources(self):

            #
            # test creating a new data provider
            #
            print 'Creating new external_data_provider'
            dataprovider_obj = IonObject(RT.ExternalDataProvider,
                               name='ExtDataProvider1',
                               description='external data provider ')
            try:
                dataprovider_id = self.client.create_external_data_provider(dataprovider_obj)
            except BadRequest as ex:
                self.fail("failed to create new data provider: %s" %ex)
            print 'new data provider id = ', dataprovider_id

            #
            # test creating a new data source
            #
            print 'Creating new data source'
            datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='data source ',
                               type='DAP')
            try:
                datasource_id = self.client.create_data_source(datasource_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source: %s" %ex)
            print 'new data source id = ', datasource_id

            #
            # test creating a new data source model
            #
            print 'Creating new data source model'
            datamodel_obj = IonObject(RT.DataSourceModel,
                               name='DataSourceModel1',
                               description='data source model')
            try:
                datamodel_id = self.client.create_data_source_model(datamodel_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source model: %s" %ex)
            print 'new data source model id = ', datamodel_id


            #
            # test creating a new external data set
            #
            print 'Creating new external data set'
            dataset_obj = IonObject(RT.ExternalDataset,
                               name='ExternalDataSet1',
                               description='external data set ')
            try:
                extdataset_id = self.client.create_external_dataset(dataset_obj)
            except BadRequest as ex:
                self.fail("failed to create new external data set: %s" %ex)
            print 'new external data set id = ', extdataset_id


            #
            # test creating a new external dataset agent
            #
            print 'Creating new external dataset agent'
            datasetagent_obj = IonObject(RT.ExternalDatasetAgent,
                               name='ExternalDatasetAgent1',
                               description='external data agent ',
                                handler_module = 'module_name',
                                handler_class = 'class_name')
            try:
                datasetagent_id = self.client.create_external_dataset_agent(datasetagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent: %s" %ex)
            print 'new external dataset agent id = ', datasetagent_id


            #
            # test creating a new data source agent
            #
            print 'Creating new data source agent'
            datasourceagent_obj = IonObject(RT.DataSourceAgent,
                               name='DataSourceAgent1',
                               description=' DataSource agent ')
            try:
                datasource_agent_id = self.client.create_data_source_agent(datasourceagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent: %s" %ex)
            print 'new data source agent id = ', datasource_agent_id





            #
            # test creating a new dataset agent instance
            #
            print 'Creating new external dataset agent instance'
            datasetagentinstance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                               name='ExternalDatasetAgentInstance1',
                               description='external dataset agent instance ')
            try:
                datasetagentinstance_id = self.client.create_external_dataset_agent_instance(datasetagentinstance_obj, datasetagent_id)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent instance: %s" %ex)
            print 'new external data agent instance id = ', datasetagentinstance_id

            #
            # test creating a new datasource agent instance
            #
            print 'Creating new data source agent instance'
            datasourceagentinstance_obj = IonObject(RT.DataSourceAgentInstance,
                               name='ExternalDataSourceAgentInstance1',
                               description='external DataSource agent instance ')
            try:
                datasource_agent_instance_id = self.client.create_data_source_agent_instance(datasourceagentinstance_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent instance: %s" %ex)
            print 'new data source agent instance id = ', datasource_agent_instance_id

            #
            # test assign / unassign
            #

            self.client.unassign_data_source_from_external_data_provider(datasource_id, dataprovider_id)

            self.client.unassign_data_source_from_data_model(datasource_id, datamodel_id)

            self.client.unassign_external_dataset_from_data_source(extdataset_id, datasource_id)

            #
            # test read
            #

            try:
                dp_obj = self.client.read_external_data_provider(dataprovider_id)
            except NotFound as ex:
                self.fail("existing data provicer was not found during read")
            else:
                pass

            try:
                dp_obj = self.client.read_data_source(datasource_id)
            except NotFound as ex:
                self.fail("existing data source was not found during read")
            else:
                pass

            #
            # test delete
            #
            try:
                self.client.delete_external_data_provider(dataprovider_id)
                self.client.delete_data_source(datasource_id)
                self.client.delete_external_dataset(extdataset_id)
                self.client.delete_data_source_model(datamodel_id)
                self.client.delete_external_dataset_agent(datasetagent_id)
                self.client.delete_data_source_agent_instance(datasource_agent_instance_id)

                self.client.force_delete_external_data_provider(dataprovider_id)
                self.client.force_delete_data_source(datasource_id)
                self.client.force_delete_external_dataset(extdataset_id)
                self.client.force_delete_data_source_model(datamodel_id)
                self.client.force_delete_external_dataset_agent(datasetagent_id)
                self.client.force_delete_data_source_agent_instance(datasource_agent_instance_id)
            except NotFound as ex:
                self.fail("existing data product was not found during delete")


            # test reading a non-existent external data provider
            print 'reading non-existent external data provider'
            try:
                bad_obj = self.client.read_external_data_provider('some_fake_id')
            except NotFound as ex:
                pass
            else:
                self.fail("non-existing data product was found during read: %s" %bad_obj)
class TestIntDataAcquisitionManagementService(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)

    def tearDown(self):
        pass


    #@unittest.skip('Not done yet.')
    def test_data_source_ops(self):
        # test creating a new data source
        print 'Creating new data source'
        datasource_obj = IonObject(RT.DataSource,
                           name='DataSource1',
                           description='instrument based new source' ,
                            data_source_type='sbe37')
        try:
            ds_id = self.client.create_data_source(datasource_obj)
        except BadRequest as ex:
            self.fail("failed to create new data source: %s" %ex)
        print 'new data source id = ', ds_id


        # test reading a non-existent data source
        print 'reading non-existent data source'
        try:
            dp_obj = self.client.read_data_source('some_fake_id')
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during read: %s" %dp_obj)

        # update a data source (tests read also)
        print 'Updating data source'
        # first get the existing data source object
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass

        # now tweak the object
        datasource_obj.description = 'the very first data source'
        # now write the dp back to the registry
        try:
            update_result = self.client.update_data_source(datasource_obj)
        except NotFound as ex:
            self.fail("existing data source was not found during update")
        except Conflict as ex:
            self.fail("revision conflict exception during data source update")
        #else:
        #    self.assertTrue(update_result == True)
        # now get the data source back to see if it was updated
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass
        self.assertTrue(datasource_obj.description == 'the very first data source')


        # now 'delete' the data source
        print "deleting data source"
        try:
            delete_result = self.client.force_delete_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during delete")
        #self.assertTrue(delete_result == True)
        # now try to get the deleted dp object
        try:
            dp_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted data source was found during read")

        # now try to delete the already deleted data source object
        print "deleting non-existing data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during delete")


    def test_register_instrument(self):
        # set up initial instrument to register
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        self.base_register_instrument(instrument_id)

    def test_register_platform(self):
        # set up initial platform to register
        platform_obj = IonObject(RT.PlatformDevice, name='Plat1',description='a platform that is creating the data product')
        platform_id, rev = self.rrclient.create(platform_obj)

        # exercise the same registration flow with the platform device
        self.base_register_instrument(platform_id)

    #@unittest.skip('Not done yet.')
    def base_register_instrument(self, instrument_id):
        # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

        # test registering a new data producer
        try:
            ds_id = self.client.register_instrument(instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id


        # test assigning a data product to an instrument, creating the stream for the product
        try:
            self.client.assign_data_product(instrument_id, dataproduct_id)
            self.client.assign_data_product_source(dataproduct_id, instrument_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

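        # assign_data_product_source() should have left a hasSource association
        # from the data product to the instrument; verify it is there.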
        assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
        if not assocs:
            self.fail("failed to assign data product to data producer")

        # test UNassigning a data product from instrument, deleting the stream for the product
        try:
            self.client.unassign_data_product(instrument_id, dataproduct_id)
            self.client.unassign_data_product_source(dataproduct_id, instrument_id)
        except BadRequest as ex:
            self.fail("failed to failed to UNassign data product to data producer data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to failed to UNassign data product to data producer data producer: %s" %ex)

        assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
        if assocs:
            self.fail("failed to unassign data product to data producer")

        # test UNregistering a new data producer
        try:
            ds_id = self.client.unregister_instrument(instrument_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)


    def test_register_external_data_set(self):
        # Register an external data set as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        ext_dataset_obj = IonObject(RT.ExternalDataset, name='DataSet1',description='an external data feed')
        ext_dataset_id, rev = self.rrclient.create(ext_dataset_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new external data set
        try:
            ds_id = self.client.register_external_data_set(ext_dataset_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an ext_dataset_id, creating the stream for the product
        try:
            self.client.assign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from ext_dataset_id, deleting the stream for the product
        try:
            self.client.unassign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to failed to UNassign data product to data producer data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to failed to UNassign data product to data producer data producer: %s" %ex)

        # test UNregistering an external data set
        try:
            ds_id = self.client.unregister_external_data_set(ext_dataset_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)





    #@unittest.skip('not ready')
    def test_eoi_resources(self):

            #
            # test creating a new data provider
            #
            print 'Creating new external_data_provider'
            dataprovider_obj = IonObject(RT.ExternalDataProvider,
                               name='ExtDataProvider1',
                               description='external data provider ')
            try:
                dataprovider_id = self.client.create_external_data_provider(dataprovider_obj)
            except BadRequest as ex:
                self.fail("failed to create new data provider: %s" %ex)
            print 'new data provider id = ', dataprovider_id

            #
            # test creating a new data source
            #
            print 'Creating new data source'
            datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='data source ',
                               data_source_type='DAP')
            try:
                datasource_id = self.client.create_data_source(datasource_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source: %s" %ex)
            print 'new data source id = ', datasource_id

            #
            # test creating a new data source model
            #
            print 'Creating new data source model'
            datamodel_obj = IonObject(RT.DataSourceModel,
                               name='DataSourceModel1',
                               description='data source model')
            try:
                datamodel_id = self.client.create_data_source_model(datamodel_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source model: %s" %ex)
            print 'new data source model id = ', datamodel_id


            #
            # test creating a new external data set
            #
            print 'Creating new external data set'
            dataset_obj = IonObject(RT.ExternalDataset,
                               name='ExternalDataSet1',
                               description='external data set ')
            try:
                extdataset_id = self.client.create_external_dataset(dataset_obj)
            except BadRequest as ex:
                self.fail("failed to create new external data set: %s" %ex)
            print 'new external data set id = ', extdataset_id


            #
            # test creating a new external dataset agent
            #
            print 'Creating new external dataset agent'
            datasetagent_obj = IonObject(RT.ExternalDatasetAgent,
                               name='ExternalDatasetAgent1',
                               description='external data agent ')
            try:
                datasetagent_id = self.client.create_external_dataset_agent(datasetagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent: %s" %ex)
            print 'new external dataset agent id = ', datasetagent_id


            #
            # test creating a new data source agent
            #
            print 'Creating new data source agent'
            datasourceagent_obj = IonObject(RT.DataSourceAgent,
                               name='DataSourceAgent1',
                               description=' DataSource agent ')
            try:
                datasource_agent_id = self.client.create_data_source_agent(datasourceagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent: %s" %ex)
            print 'new data source agent id = ', datasource_agent_id





            #
            # test creating a new dataset agent instance
            #
            print 'Creating new external dataset agent instance'
            datasetagentinstance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                               name='ExternalDatasetAgentInstance1',
                               description='external dataset agent instance ')
            try:
                datasetagentinstance_id = self.client.create_external_dataset_agent_instance(datasetagentinstance_obj, datasetagent_id)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent instance: %s" %ex)
            print 'new external data agent instance id = ', datasetagentinstance_id

            #
            # test creating a new datasource agent instance
            #
            print 'Creating new data source agent instance'
            datasourceagentinstance_obj = IonObject(RT.DataSourceAgentInstance,
                               name='ExternalDataSourceAgentInstance1',
                               description='external DataSource agent instance ')
            try:
                datasource_agent_instance_id = self.client.create_data_source_agent_instance(datasourceagentinstance_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent instance: %s" %ex)
            print 'new data source agent instance id = ', datasource_agent_instance_id

            #
            # test assign / unassign
            #

            self.client.unassign_data_source_from_external_data_provider(datasource_id, dataprovider_id)

            self.client.unassign_data_source_from_data_model(datasource_id, datamodel_id)

            self.client.unassign_external_dataset_from_data_source(extdataset_id, datasource_id)

            #
            # test read
            #

            try:
                dp_obj = self.client.read_external_data_provider(dataprovider_id)
            except NotFound as ex:
                self.fail("existing data provicer was not found during read")
            else:
                pass

            try:
                dp_obj = self.client.read_data_source(datasource_id)
            except NotFound as ex:
                self.fail("existing data source was not found during read")
            else:
                pass

            #
            # test delete
            #
            try:
                self.client.delete_external_data_provider(dataprovider_id)
                self.client.delete_data_source(datasource_id)
                self.client.delete_external_dataset(extdataset_id)
                self.client.delete_data_source_model(datamodel_id)
                self.client.delete_external_dataset_agent(datasetagent_id)
                self.client.delete_data_source_agent_instance(datasource_agent_instance_id)

                self.client.force_delete_external_data_provider(dataprovider_id)
                self.client.force_delete_data_source(datasource_id)
                self.client.force_delete_external_dataset(extdataset_id)
                self.client.force_delete_data_source_model(datamodel_id)
                self.client.force_delete_external_dataset_agent(datasetagent_id)
                self.client.force_delete_data_source_agent_instance(datasource_agent_instance_id)
            except NotFound as ex:
                self.fail("existing data product was not found during delete")


            # test reading a non-existent external data provider
            print 'reading non-existent external data provider'
            try:
                bad_obj = self.client.read_external_data_provider('some_fake_id')
            except NotFound as ex:
                pass
            else:
                self.fail("non-existing data product was found during read: %s" %bad_obj)


    def make_grt_parser(self):
        return self.client.create_parser(Parser(name='grt', description='', module='ion.util.parsers.global_range_test', method='grt_parser', config=None))


    @unittest.skip("Deprecated")
    def test_qc_attachment(self):
        instrument_device = InstrumentDevice(name='whatever')
        instrument_device_id,_ = self.rrclient.create(instrument_device)
        self.addCleanup(self.rrclient.delete, instrument_device_id)
        self.client.register_instrument(instrument_device_id)
        self.addCleanup(self.client.unregister_instrument, instrument_device_id)
        dp = DataProduct(name='instrument output')

        dp_id,_ = self.rrclient.create(dp)
        self.addCleanup(self.rrclient.delete, dp_id)

        parser_id = self.make_grt_parser()
        attachment = Attachment(name='qc ref', attachment_type=AttachmentType.REFERENCE,content=global_range_test_document, context=ReferenceAttachmentContext(parser_id=parser_id))
        att_id = self.rrclient.create_attachment(dp_id, attachment)
        self.addCleanup(self.rrclient.delete_attachment, att_id)

        attachment2 = Attachment(name='qc ref2', attachment_type=AttachmentType.REFERENCE, content=global_range_test_document2, context=ReferenceAttachmentContext(parser_id=parser_id))
        att2_id = self.rrclient.create_attachment(dp_id, attachment2)
        self.addCleanup(self.rrclient.delete_attachment, att2_id)

        self.client.assign_data_product(instrument_device_id, dp_id)
        self.addCleanup(self.client.unassign_data_product, instrument_device_id, dp_id)
        svm = StoredValueManager(self.container)
        doc = svm.read_value('grt_CE01ISSM-MF005-01-CTDBPC999_TEMPWAT')
        np.testing.assert_array_almost_equal(doc['grt_min_value'], -2.)
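
# Hedged sketch of the lookup pattern behind test_qc_attachment: the global
# range test parser is assumed to write one document per
# '<test>_<reference designator>_<parameter>' key, which consumers read back
# through a key-value store. The key format and the in-memory dict are
# illustrative stand-ins for the real StoredValueManager.
_store = {}

def _store_grt_doc(refdes, parameter, grt_min_value, grt_max_value):
    key = 'grt_%s_%s' % (refdes, parameter)
    _store[key] = {'grt_min_value': grt_min_value,
                   'grt_max_value': grt_max_value}

_store_grt_doc('CE01ISSM-MF005-01-CTDBPC999', 'TEMPWAT', -2.0, 35.0)  # 35.0 is illustrative
_doc = _store['grt_CE01ISSM-MF005-01-CTDBPC999_TEMPWAT']
assert abs(_doc['grt_min_value'] - (-2.0)) < 1e-6
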
Code example #8
0
class TestDataProductProvenance(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(
                    RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)

        self.addCleanup(killAllDataProcesses)

    def test_get_data_product_provenance_report(self):
        #Create a test device
        device_obj = Device(name='Device1', description='test instrument site')
        device_id, _ = self.rrclient.create(device_obj)
        self.addCleanup(self.rrclient.delete, device_id)

        #Create a test DataProduct
        data_product1_obj = DataProduct(name='DataProduct1',
                                        description='test data product 1')
        data_product1_id, _ = self.rrclient.create(data_product1_obj)
        self.addCleanup(self.rrclient.delete, data_product1_id)

        #Create a test DataProcess
        data_process_obj = DataProcess(name='DataProcess',
                                       description='test data process')
        data_process_id, _ = self.rrclient.create(data_process_obj)
        self.addCleanup(self.rrclient.delete, data_process_id)

        #Create a second test DataProduct
        data_product2_obj = DataProduct(name='DataProduct2',
                                        description='test data product 2')
        data_product2_id, _ = self.rrclient.create(data_product2_obj)
        self.addCleanup(self.rrclient.delete, data_product2_id)

        #Create a test DataProducer
        data_producer_obj = DataProducer(name='DataProducer',
                                         description='test data producer')
        data_producer_id, rev = self.rrclient.create(data_producer_obj)

        #Link the DataProcess to the second DataProduct manually
        assoc_id, _ = self.rrclient.create_association(
            subject=data_process_id,
            predicate=PRED.hasInputProduct,
            object=data_product2_id)
        self.addCleanup(self.rrclient.delete_association, assoc_id)

        # Register the instrument and process. This links the device and the data process
        # with their own producers
        self.damsclient.register_instrument(device_id)
        self.addCleanup(self.damsclient.unregister_instrument, device_id)
        self.damsclient.register_process(data_process_id)
        self.addCleanup(self.damsclient.unregister_process, data_process_id)

        #Manually link the first DataProduct with the test DataProducer
        assoc_id, _ = self.rrclient.create_association(
            subject=data_product1_id,
            predicate=PRED.hasDataProducer,
            object=data_producer_id)

        #Get the DataProducer linked to the DataProcess (created in register_process above)
        #Make the test DataProducer (linked to DataProduct1) its parent
        data_process_producer_ids, _ = self.rrclient.find_objects(
            subject=data_process_id,
            predicate=PRED.hasDataProducer,
            object_type=RT.DataProducer,
            id_only=True)
        assoc_id, _ = self.rrclient.create_association(
            subject=data_process_producer_ids[0],
            predicate=PRED.hasParent,
            object=data_producer_id)
        self.addCleanup(self.rrclient.delete_association, assoc_id)

        #Get the DataProducer linked to the Device (created in register_instrument above)
        #Make it the parent of the test DataProducer
        device_producer_ids, _ = self.rrclient.find_objects(
            subject=device_id,
            predicate=PRED.hasDataProducer,
            object_type=RT.DataProducer,
            id_only=True)
        assoc_id, _ = self.rrclient.create_association(
            subject=data_producer_id,
            predicate=PRED.hasParent,
            object=device_producer_ids[0])

        #Create the links between the Device, DataProducts, DataProcess, and all DataProducers
        self.damsclient.assign_data_product(input_resource_id=device_id,
                                            data_product_id=data_product1_id)
        self.addCleanup(self.damsclient.unassign_data_product, device_id,
                        data_product1_id)
        self.damsclient.assign_data_product(input_resource_id=data_process_id,
                                            data_product_id=data_product2_id)
        self.addCleanup(self.damsclient.unassign_data_product, data_process_id,
                        data_product2_id)

        #Traverse through the relationships to get the links between objects
        res = self.dpmsclient.get_data_product_provenance_report(
            data_product2_id)

        #Make sure there are four keys
        self.assertEqual(len(res.keys()), 4)

        parent_count = 0
        config_count = 0
        for v in res.itervalues():
            if 'parent' in v:
                parent_count += 1
            if 'config' in v:
                config_count += 1

        #Make sure there are three parents and four configs
        self.assertEqual(parent_count, 3)
        self.assertEqual(config_count, 4)
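        # The provenance chain has four entries: the device-side root has no
        # parent, the other three each record one, and every entry carries a
        # config section.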

    @unittest.skip('This test is obsolete with new framework')
    def test_get_provenance(self):

        #create a deployment with metadata and an initial site and device
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(
            instrument_site_obj, "")
        log.debug('test_get_provenance: new instrument_site_id id = %s ',
                  str(instrument_site_id))

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")

        try:
            instModel_id = self.imsclient.create_instrument_model(
                instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" % ex)
        log.debug('test_get_provenance: new InstrumentModel id = %s ',
                  str(instModel_id))

        self.omsclient.assign_instrument_model_to_instrument_site(
            instModel_id, instrument_site_id)

        # Create InstrumentAgent
        parsed_config = StreamConfiguration(
            stream_name='parsed',
            parameter_dictionary_name='ctd_parsed_param_dict')
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri=DRV_URI_GOOD,
                                  stream_configurations=[parsed_config])
        try:
            instAgent_id = self.imsclient.create_instrument_agent(
                instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" % ex)
        log.debug('test_get_provenance:new InstrumentAgent id = %s',
                  instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            'test_get_provenance: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) '
        )
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        try:
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" % ex)

        log.debug(
            "test_get_provenance: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
            instDevice_id)

        #-------------------------------
        # Create CTD Parsed data product
        #-------------------------------
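        # time_series_domain() builds the default temporal and spatial domain
        # objects; dump() serializes each to a plain dict so it can be stored
        # on the DataProduct resource below.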
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubclient.create_stream_definition(
            name='parsed', parameter_dictionary_id=pdict_id)

        log.debug(
            'test_get_provenance:Creating new CDM data product with a stream definition'
        )

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        ctd_parsed_data_product = self.dpmsclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug('new dp_id = %s', ctd_parsed_data_product)

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_parsed_data_product)
        self.dpmsclient.activate_data_product_persistence(
            data_product_id=ctd_parsed_data_product)

        #-------------------------------
        # create a data product for the site to pass the OMS check.... we need to remove this check
        #-------------------------------
        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        log_data_product_id = self.dpmsclient.create_data_product(
            dp_obj, parsed_stream_def_id)

        #-------------------------------
        # Deploy instrument device to instrument site
        #-------------------------------
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment')
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_instrument_site(instrument_site_id,
                                              deployment_id)
        self.imsclient.deploy_instrument_device(instDevice_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        self.omsclient.activate_deployment(deployment_id)
        inst_device_objs, _ = self.rrclient.find_objects(
            subject=instrument_site_id,
            predicate=PRED.hasDevice,
            object_type=RT.InstrumentDevice,
            id_only=False)
        log.debug("test_create_deployment: deployed device: %s ",
                  str(inst_device_objs[0]))

        #-------------------------------
        # Create the agent instance
        #-------------------------------

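        # Port agent connection settings: the SBE37 device endpoint comes from
        # CFG, along with the local command/data ports for an ethernet-type
        # UNIX port agent process.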
        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config)

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create data process definition ctd_L0_all"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L0_all',
            description='transform ctd package into three separate L0 streams',
            module='ion.processes.data.transforms.ctd.ctd_L0_all',
            class_name='ctd_L0_all')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new ctd_L0_all data process definition: %s" %
                ex)

        #-------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create data process definition CTDL1ConductivityTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_conductivity',
            description='create the L1 conductivity data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
            class_name='CTDL1ConductivityTransform')
        try:
            ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new CTDL1ConductivityTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create data process definition CTDL1PressureTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_pressure',
            description='create the L1 pressure data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
            class_name='CTDL1PressureTransform')
        try:
            ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new CTDL1PressureTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create data process definition CTDL1TemperatureTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_temperature',
            description='create the L1 temperature data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
            class_name='CTDL1TemperatureTransform')
        try:
            ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new CTDL1TemperatureTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create data process definition SalinityTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L2_salinity',
            description='create the L2 salinity data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
            class_name='SalinityTransform')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new SalinityTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create data process definition DensityTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L2_density',
            description='create the L2 density data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_density',
            class_name='DensityTransform')
        try:
            ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new DensityTransform data process definition: %s"
                % ex)
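
        # The six definitions above form the CTD transform chain:
        #   ctd_L0_all -> L1 conductivity / pressure / temperature -> L2 salinity & density
        # The rest of the test attaches stream definitions, output products and
        # data processes to this chain.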

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(
            name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_conductivity_id,
            ctd_L0_all_dprocdef_id,
            binding='conductivity')

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(
            name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_pressure_id,
            ctd_L0_all_dprocdef_id,
            binding='pressure')

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(
            name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_temperature_id,
            ctd_L0_all_dprocdef_id,
            binding='temperature')
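
        # Each binding ties a stream definition to one of the named outputs of the
        # ctd_L0_all transform ('conductivity', 'pressure', 'temperature').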

        log.debug(
            "TestDataProductProvenance: create output data product L0 conductivity"
        )

        ctd_l0_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Conductivity',
            description='transform output conductivity',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l0_conductivity_output_dp_id = self.dpmsclient.create_data_product(
            ctd_l0_conductivity_output_dp_obj,
            outgoing_stream_l0_conductivity_id)

        log.debug(
            "TestDataProductProvenance: create output data product L0 pressure"
        )

        ctd_l0_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l0_pressure_output_dp_id = self.dpmsclient.create_data_product(
            ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id)
        log.debug(
            "TestDataProductProvenance: create output data product L0 temperature"
        )

        ctd_l0_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l0_temperature_output_dp_id = self.dpmsclient.create_data_product(
            ctd_l0_temperature_output_dp_obj,
            outgoing_stream_l0_temperature_id)

        #-------------------------------
        # L1 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(
            name='L1_conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_conductivity_id,
            ctd_L1_conductivity_dprocdef_id,
            binding='conductivity')

        outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(
            name='L1_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_pressure_id,
            ctd_L1_pressure_dprocdef_id,
            binding='pressure')

        outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(
            name='L1_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_temperature_id,
            ctd_L1_temperature_dprocdef_id,
            binding='temperature')

        log.debug(
            "TestDataProductProvenance: create output data product L1 conductivity"
        )

        ctd_l1_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l1_conductivity_output_dp_id = self.dpmsclient.create_data_product(
            ctd_l1_conductivity_output_dp_obj,
            outgoing_stream_l1_conductivity_id)

        log.debug(
            "TestDataProductProvenance: create output data product L1 pressure"
        )

        ctd_l1_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Pressure',
            description='transform output L1 pressure',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l1_pressure_output_dp_id = self.dpmsclient.create_data_product(
            ctd_l1_pressure_output_dp_obj, outgoing_stream_l1_pressure_id)

        log.debug(
            "TestDataProductProvenance: create output data product L1 temperature"
        )

        ctd_l1_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Temperature',
            description='transform output L1 temperature',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l1_temperature_output_dp_id = self.dpmsclient.create_data_product(
            ctd_l1_temperature_output_dp_obj,
            outgoing_stream_l1_temperature_id)

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(
            name='L2_salinity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_salinity_id,
            ctd_L2_salinity_dprocdef_id,
            binding='salinity')

        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(
            name='L2_Density', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_density_id,
            ctd_L2_density_dprocdef_id,
            binding='density')

        log.debug(
            "TestDataProductProvenance: create output data product L2 Salinity"
        )

        ctd_l2_salinity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L2_Salinity',
            description='transform output L2 salinity',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l2_salinity_output_dp_id = self.dpmsclient.create_data_product(
            ctd_l2_salinity_output_dp_obj, outgoing_stream_l2_salinity_id)

        log.debug(
            "TestDataProductProvenance: create output data product L2 Density")

        #        ctd_l2_density_output_dp_obj = IonObject(   RT.DataProduct,
        #                                                    name='L2_Density',
        #                                                    description='transform output pressure',
        #                                                    temporal_domain = tdom,
        #                                                    spatial_domain = sdom)
        #
        #        ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj,
        #                                                                            outgoing_stream_l2_density_id,
        #                                                                            parameter_dictionary)

        contactInfo = ContactInformation()
        contactInfo.individual_names_given = "Bill"
        contactInfo.individual_name_family = "Smith"
        contactInfo.street_address = "111 First St"
        contactInfo.city = "San Diego"
        contactInfo.email = "*****@*****.**"
        contactInfo.phones = ["858-555-6666"]
        contactInfo.country = "USA"
        contactInfo.postal_code = "92123"

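        # Unlike the other output products, the L2 density product is created with
        # contact and ISO metadata populated, so the extension/provenance calls
        # later in the test also cover a fully annotated product.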
        ctd_l2_density_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L2_Density',
            description='transform output density',
            contacts=[contactInfo],
            iso_topic_category="my_iso_topic_category_here",
            quality_control_level="1",
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(
            ctd_l2_density_output_dp_obj, outgoing_stream_l2_density_id)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create L0 all data_process start")
        try:
            input_data_products = [ctd_parsed_data_product]
            output_data_products = [
                ctd_l0_conductivity_output_dp_id, ctd_l0_pressure_output_dp_id,
                ctd_l0_temperature_output_dp_id
            ]

            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=ctd_L0_all_dprocdef_id,
                in_data_product_ids=input_data_products,
                out_data_product_ids=output_data_products)
            #activate only this data process just for coverage
            self.dataprocessclient.activate_data_process(
                ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        contents = "this is the lookup table  contents, replace with a file..."
        att = IonObject(RT.Attachment,
                        name='deviceLookupTable',
                        content=base64.encodestring(contents),
                        keywords=['DataProcessInput'],
                        attachment_type=AttachmentType.ASCII)
        deviceAttachment = self.rrclient.create_attachment(
            ctd_l0_all_data_process_id, att)
        log.info(
            'TestDataProductProvenance: data process attachment id = %s',
            deviceAttachment)

        log.debug(
            "TestDataProductProvenance: create L0 all data_process return")

        #-------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create L1 Conductivity data_process start"
        )
        try:
            l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=ctd_L1_conductivity_dprocdef_id,
                in_data_product_ids=[ctd_l0_conductivity_output_dp_id],
                out_data_product_ids=[ctd_l1_conductivity_output_dp_id])

            self.dataprocessclient.activate_data_process(
                l1_conductivity_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        #-------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_pressure_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=ctd_L1_pressure_dprocdef_id,
                in_data_product_ids=[ctd_l0_pressure_output_dp_id],
                out_data_product_ids=[ctd_l1_pressure_output_dp_id])

            self.dataprocessclient.activate_data_process(
                l1_pressure_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        #-------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=ctd_L1_temperature_dprocdef_id,
                in_data_product_ids=[ctd_l0_temperature_output_dp_id],
                out_data_product_ids=[ctd_l1_temperature_output_dp_id])

            self.dataprocessclient.activate_data_process(
                l1_temperature_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        #-------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create L2_salinity data_process start")
        try:
            l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=ctd_L2_salinity_dprocdef_id,
                in_data_product_ids=[
                    ctd_l1_conductivity_output_dp_id,
                    ctd_l1_pressure_output_dp_id,
                    ctd_l1_temperature_output_dp_id
                ],
                out_data_product_ids=[ctd_l2_salinity_output_dp_id])

            self.dataprocessclient.activate_data_process(
                l2_salinity_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        #-------------------------------
        # L2 Density: Create the data process
        #-------------------------------
        log.debug(
            "TestDataProductProvenance: create L2_Density data_process start")
        try:
            in_dp_ids = [
                ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id,
                ctd_l1_temperature_output_dp_id
            ]
            out_dp_ids = [ctd_l2_density_output_dp_id]

            l2_density_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=ctd_L2_density_dprocdef_id,
                in_data_product_ids=in_dp_ids,
                out_data_product_ids=out_dp_ids)

            self.dataprocessclient.activate_data_process(
                l2_density_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        #-------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)
        print 'TestDataProductProvenance: Instrument agent instance obj = ', inst_agent_instance_obj

        # Start a resource agent client to talk with the instrument agent.
        #        self._ia_client = ResourceAgentClient('iaclient', name=ResourceAgentClient._get_agent_process_id(instDevice_id), process=FakeProcess())
        #        print 'activate_instrument: got ia client %s', self._ia_client
        #        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)

        self.dataprocessclient.deactivate_data_process(
            l2_density_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(
            l2_salinity_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(
            l1_temperature_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(
            l1_pressure_data_process_id)
        self.dataprocessclient.deactivate_data_process(
            l1_conductivity_data_process_id)
        self.dataprocessclient.deactivate_data_process(
            ctd_l0_all_data_process_id)

        #-------------------------------
        # Retrieve the provenance info for the ctd density data product
        #-------------------------------
        provenance_dict = self.dpmsclient.get_data_product_provenance(
            ctd_l2_density_output_dp_id)
        log.debug("TestDataProductProvenance: provenance_dict  %s",
                  str(provenance_dict))

        #validate that products are represented
        self.assertTrue(provenance_dict[str(ctd_l1_conductivity_output_dp_id)])
        self.assertTrue(provenance_dict[str(ctd_l0_conductivity_output_dp_id)])
        self.assertTrue(provenance_dict[str(ctd_l2_density_output_dp_id)])
        self.assertTrue(provenance_dict[str(ctd_l1_temperature_output_dp_id)])
        self.assertTrue(provenance_dict[str(ctd_l0_temperature_output_dp_id)])

        density_dict = (provenance_dict[str(ctd_l2_density_output_dp_id)])
        self.assertEquals(density_dict['producer'],
                          [l2_density_all_data_process_id])
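
        # The provenance dict is keyed by (stringified) data product id; each entry
        # records, among other fields, the 'producer' data process id(s) for that product.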

        #-------------------------------
        # Retrieve the extended resource for this data product
        #-------------------------------
        extended_product = self.dpmsclient.get_data_product_extension(
            ctd_l2_density_output_dp_id)
        self.assertEqual(1, len(extended_product.data_processes))
        self.assertEqual(3, len(extended_product.process_input_data_products))
        #        log.debug("TestDataProductProvenance: DataProduct provenance_product_list  %s", str(extended_product.provenance_product_list))
        #        log.debug("TestDataProductProvenance: DataProduct data_processes  %s", str(extended_product.data_processes))
        #        log.debug("TestDataProductProvenance: DataProduct process_input_data_products  %s", str(extended_product.process_input_data_products))
        #        log.debug("TestDataProductProvenance: provenance  %s", str(extended_product.computed.provenance.value))

        #-------------------------------
        # Retrieve the extended resource for this data process
        #-------------------------------
        extended_process_def = self.dataprocessclient.get_data_process_definition_extension(
            ctd_L0_all_dprocdef_id)

        #        log.debug("TestDataProductProvenance: DataProcess extended_process_def  %s", str(extended_process_def))
        #        log.debug("TestDataProductProvenance: DataProcess data_processes  %s", str(extended_process_def.data_processes))
        #        log.debug("TestDataProductProvenance: DataProcess data_products  %s", str(extended_process_def.data_products))
        self.assertEqual(1, len(extended_process_def.data_processes))
        self.assertEqual(3,
                         len(extended_process_def.output_stream_definitions))
        self.assertEqual(3, len(extended_process_def.data_products))  # one product list because there is one data process

        #-------------------------------
        # Request the xml report
        #-------------------------------
        results = self.dpmsclient.get_data_product_provenance_report(
            ctd_l2_density_output_dp_id)
        print results

        #-------------------------------
        # Cleanup
        #-------------------------------

        self.dpmsclient.delete_data_product(ctd_parsed_data_product)
        self.dpmsclient.delete_data_product(log_data_product_id)
        self.dpmsclient.delete_data_product(ctd_l0_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_salinity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_density_output_dp_id)
コード例 #9
0
class TestIntDataProcessManagementService(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()

        # Establish endpoint with container
        container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
        #print 'got CC client'
        container_client.start_rel_from_url('res/deploy/r2sa.yml')


        # Now create client to DataProcessManagementService
        self.Processclient = DataProcessManagementServiceClient(node=self.container.node)
        self.RRclient = ResourceRegistryServiceClient(node=self.container.node)
        self.DAMSclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DPMSclient = DataProductManagementServiceClient(node=self.container.node)
        self.PubSubClient = PubsubManagementServiceClient(node=self.container.node)

    def test_createDataProcess(self):


        #-------------------------------
        # Data Process Definition
        #-------------------------------
        log.debug("TestIntDataProcessManagementService: create data process definition")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='data_process_definition',
                            description='some new dpd',
                            module='ion.processes.data.transforms.transform_example',
                            class_name='TransformExample',
                            process_source='some_source_reference')
        try:
            dprocdef_id = self.Processclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new data process definition: %s" %ex)


        # test Data Process Definition creation in rr
        dprocdef_obj = self.Processclient.read_data_process_definition(dprocdef_id)
        self.assertEquals(dprocdef_obj.name,'data_process_definition')

        # Create an input instrument
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.RRclient.create(instrument_obj)

        # Register the instrument so that the data producer and stream object are created
        data_producer_id = self.DAMSclient.register_instrument(instrument_id)
        log.debug("TestIntDataProcessManagementService  data_producer_id %s" % data_producer_id)



        #-------------------------------
        # Input Data Product
        #-------------------------------
        log.debug("TestIntDataProcessManagementService: create input data product")
        input_dp_obj = IonObject(RT.DataProduct, name='InputDataProduct', description='some new dp')
        try:
            input_dp_id = self.DPMSclient.create_data_product(input_dp_obj, instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new input data product: %s" %ex)

        # Retrieve the stream via the DataProduct->Stream associations
        stream_ids, _ = self.RRclient.find_objects(input_dp_id, PRED.hasStream, None, True)
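        # Positional args: subject, predicate, object type (None = any), id_only=True.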

        log.debug("TestIntDataProcessManagementService: in stream_ids "   +  str(stream_ids))
        self.in_stream_id = stream_ids[0]
        log.debug("TestIntDataProcessManagementService: Input Stream: "   +  str( self.in_stream_id))

        #-------------------------------
        # Output Data Product
        #-------------------------------
        log.debug("TestIntDataProcessManagementService: create output data product")
        output_dp_obj = IonObject(RT.DataProduct, name='OutDataProduct',description='transform output')
        output_dp_id = self.DPMSclient.create_data_product(output_dp_obj)

        # this will NOT create a stream for the product because the data process (source) resource has not been created yet.

        #-------------------------------
        # Create the data process
        #-------------------------------
        log.debug("TestIntDataProcessManagementService: create_data_process start")
        try:
            dproc_id = self.Processclient.create_data_process(dprocdef_id, input_dp_id, output_dp_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        self.DAMSclient.assign_data_product(dproc_id, output_dp_id, False)

        log.debug("TestIntDataProcessManagementService: create_data_process return")

        #-------------------------------
        # Producer (Sample Input)
        #-------------------------------
        # Create a producing example process.
        # Cheat by making a publisher object directly so the test can send messages;
        # a dummy process (spawned below) stands in for a real process context.
        #stream_route = self.PubSubClient.register_producer(exchange_name='producer_doesnt_have_a_name1', stream_id=self.in_stream_id)
        #self.ctd_stream1_publisher = StreamPublisher(node=self.container.node, name=('science_data',stream_route.routing_key), process=self.container)


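        # Spawn a bare SimpleProcess purely to give the publisher registrar the
        # process context it requires.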
        pid = self.container.spawn_process(name='dummy_process_for_test',
            module='pyon.ion.process',
            cls='SimpleProcess',
            config={})
        dummy_process = self.container.proc_manager.procs[pid]

        publisher_registrar = StreamPublisherRegistrar(process=dummy_process, node=self.container.node)
        self.ctd_stream1_publisher = publisher_registrar.create_publisher(stream_id=self.in_stream_id)

        msg = {'num':'3'}
        self.ctd_stream1_publisher.publish(msg)

        time.sleep(1)

        msg = {'num':'5'}
        self.ctd_stream1_publisher.publish(msg)

        time.sleep(1)

        msg = {'num':'9'}
        self.ctd_stream1_publisher.publish(msg)

        # See /tmp/transform_output for results.....

        # clean up the data process
        try:
            self.Processclient.delete_data_process(dproc_id)
        except BadRequest as ex:
            self.fail("failed to create new data process definition: %s" %ex)

        with self.assertRaises(NotFound) as e:
            self.Processclient.read_data_process(dproc_id)

        try:
            self.Processclient.delete_data_process_definition(dprocdef_id)
        except BadRequest as ex:
            self.fail("failed to create new data process definition: %s" %ex)

        with self.assertRaises(NotFound) as e:
            self.Processclient.read_data_process_definition(dprocdef_id)
class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)



#    @unittest.skip('not working')
    def test_createDataProcess(self):

        #-------------------------------
        # Data Process Definition
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create data process definition")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all',
                            process_source='some_source_reference')
        try:
            dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new data process definition: %s" %ex)


        # test Data Process Definition creation in rr
        dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id)
        self.assertEquals(dprocdef_obj.name,'ctd_L0_all')

        # Create an input instrument
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        # Register the instrument so that the data producer and stream object are created
        data_producer_id = self.damsclient.register_instrument(instrument_id)
        log.debug("TestIntDataProcessMgmtServiceMultiOut  data_producer_id %s" % data_producer_id)

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = ctd_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(container=ctd_stream_def, name='Simulated CTD data')

        self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id )


        #-------------------------------
        # Input Data Product
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create input data product")



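        # CoverageCraft supplies the temporal/spatial domains and the parameter
        # dictionary; dump() turns them into plain dicts suitable for resource
        # attributes and service arguments.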
        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        input_dp_obj = IonObject(   RT.DataProduct,
                                    name='InputDataProduct',
                                    description='some new dp',
                                    temporal_domain = tdom,
                                    spatial_domain = sdom)

        input_dp_id = self.dataproductclient.create_data_product(input_dp_obj, ctd_stream_def_id, parameter_dictionary)

        self.damsclient.assign_data_product(instrument_id, input_dp_id)

        # Retrieve the stream via the DataProduct->Stream associations
        stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True)

        log.debug("TestIntDataProcessMgmtServiceMultiOut: in stream_ids "   +  str(stream_ids))
        self.in_stream_id = stream_ids[0]
        log.debug("TestIntDataProcessMgmtServiceMultiOut: Input Stream: "   +  str( self.in_stream_id))

        #-------------------------------
        # Output Data Product
        #-------------------------------

        outgoing_stream_conductivity = L0_conductivity_stream_definition()
        outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_conductivity, name='conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id )

        outgoing_stream_pressure = L0_pressure_stream_definition()
        outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_pressure, name='pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id )

        outgoing_stream_temperature = L0_temperature_stream_definition()
        outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_temperature, name='temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id )


        self.output_products={}
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create output data product conductivity")

        output_dp_obj = IonObject(RT.DataProduct,
            name='conductivity',
            description='transform output conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id, parameter_dictionary)
        self.output_products['conductivity'] = output_dp_id_1
        self.dataproductclient.activate_data_product_persistence(data_product_id=output_dp_id_1)


        log.debug("TestIntDataProcessMgmtServiceMultiOut: create output data product pressure")

        output_dp_obj = IonObject(RT.DataProduct,
            name='pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id, parameter_dictionary)
        self.output_products['pressure'] = output_dp_id_2
        self.dataproductclient.activate_data_product_persistence(data_product_id=output_dp_id_2)

        log.debug("TestIntDataProcessMgmtServiceMultiOut: create output data product temperature")

        output_dp_obj = IonObject(RT.DataProduct,
            name='temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id, parameter_dictionary)
        self.output_products['temperature'] = output_dp_id_3
        self.dataproductclient.activate_data_product_persistence(data_product_id=output_dp_id_3)
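
        # self.output_products maps binding name -> data product id; it is handed to
        # create_data_process below so each named transform output is routed to the
        # matching product.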


        #-------------------------------
        # Create the data process
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create_data_process start")
        try:
            dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("TestIntDataProcessMgmtServiceMultiOut: create_data_process return")

        # these assigns happen inside create_data_process
        #self.damsclient.assign_data_product(input_resource_id=dproc_id, data_product_id=output_dp_id_1)
        #self.damsclient.assign_data_product(input_resource_id=dproc_id, data_product_id=output_dp_id_2)
        #self.damsclient.assign_data_product(input_resource_id=dproc_id, data_product_id=output_dp_id_3)




#    @unittest.skip('not working.. Fix activate_data_product_persistence()')
    @patch.dict(CFG, {'endpoint':{'receive':{'timeout': 60}}})
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel", model="SBE37IMModel" )
        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" %ex)
        print 'test_createDataProcessUsingSim: new InstrumentModel id = ', instModel_id


        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="ion.agents.instrument.instrument_agent", driver_class="InstrumentAgent" )
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" %ex)
        print 'test_createDataProcessUsingSim: new InstrumentAgent id = ', instAgent_id

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" %ex)

        print 'test_createDataProcessUsingSim: new InstrumentDevice id = ', instDevice_id

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------
        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", svr_addr="localhost",
                                          driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", driver_class="SBE37Driver",
                                          cmd_port=5556, evt_port=5557, comms_method="ethernet", comms_device_address=CFG.device.sbe37.host, comms_device_port=CFG.device.sbe37.port,
                                          comms_server_address="localhost", comms_server_port=8888)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)

        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(container=ctd_stream_def)

        print 'test_createDataProcessUsingSim: new Stream Definition id = ', ctd_stream_def_id

        print 'Creating new CDM data product with a stream definition'


        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)

        print 'new ctd_parsed_data_product_id = ', ctd_parsed_data_product

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)
        print 'test_createDataProcessUsingSim: Data product streams1 = ', stream_ids

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        print 'test_createDataProcessUsingSim: Creating new RAW data product with a stream definition'
        raw_stream_def = SBE37_RAW_stream_definition()
        raw_stream_def_id = self.pubsubclient.create_stream_definition(container=raw_stream_def)


        dp_obj = IonObject(RT.DataProduct,
                            name='ctd_raw',
                            description='raw stream test',
                            temporal_domain = tdom,
                            spatial_domain = sdom)

        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id, parameter_dictionary)
        print 'new ctd_raw_data_product_id = ', ctd_raw_data_product

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)
        print 'Data product streams2 = ', stream_ids
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug("test_createDataProcessUsingSim: create data process definition ctd_L0_all")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all',
                            process_source='some_source_reference')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new ctd_L0_all data process definition: %s" %ex)
            
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity = L0_conductivity_stream_definition()
        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l0_conductivity, name='L0_Conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id )

        outgoing_stream_l0_pressure = L0_pressure_stream_definition()
        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l0_pressure, name='L0_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id )

        outgoing_stream_l0_temperature = L0_temperature_stream_definition()
        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l0_temperature, name='L0_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id )


        self.output_products={}
        log.debug("test_createDataProcessUsingSim: create output data product L0 conductivity")

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id,
                                                                                parameter_dictionary)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_conductivity_output_dp_id)


        log.debug("test_createDataProcessUsingSim: create output data product L0 pressure")

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id,
                                                                                    parameter_dictionary)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_pressure_output_dp_id)

        log.debug("test_createDataProcessUsingSim: create output data product L0 temperature")

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id,
                                                                                    parameter_dictionary)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_temperature_output_dp_id)


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add validation for Req: L4-CI-SA-RQ-367 - Data processing shall notify registered data product consumers about data processing workflow life cycle events

        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug("test_createDataProcessUsingSim: create data_process start")
        try:
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("test_createDataProcessUsingSim: data_process created: %s", str(ctd_l0_all_data_process_id))


        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: add validation for Req: L4-CI-SA-RQ-366 - Data processing shall manage data topic definitions

        log.debug("test_createDataProcessUsingSim: activate_data_process ")
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        #todo: check that activate event is received L4-CI-SA-RQ-367


        # todo: monitor the process to see if it is active (sa-rq-182)
        # todo: this has not yet been completed by CEI; it will probably surface through a DPMS call
        log.debug("test_createDataProcessUsingSim: call CEI interface to monitor")

        self.dataproductclient.suspend_data_product_persistence(data_product_id=ctd_raw_data_product)
        self.dataproductclient.suspend_data_product_persistence(data_product_id=ctd_l0_conductivity_output_dp_id)
        self.dataproductclient.suspend_data_product_persistence(data_product_id=ctd_l0_pressure_output_dp_id)
        self.dataproductclient.suspend_data_product_persistence(data_product_id=ctd_l0_temperature_output_dp_id)

        log.debug("test_createDataProcessUsingSim: deactivate_data_process ")
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)

        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)
コード例 #11
0
class TestDataProductVersions(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.client = DataProductManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.processdispatchclient   = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)


    #@unittest.skip('not working')
    def test_createDataProductVersionSimple(self):

        ctd_stream_def_id = self.pubsubcli.create_stream_definition( name='test')

        # test creating a new data product which will also create the initial/default version
        log.debug('Creating new data product with a stream definition')

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='DP',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        dp_id = self.client.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)
        log.debug( 'new dp_id = %s', str(dp_id))

        #test that the links exist
        version_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasVersion, id_only=True)
        log.debug( 'version_ids = %s', str(version_ids))

        stream_ids, _ = self.rrclient.find_objects(subject=version_ids[0], predicate=PRED.hasStream, id_only=True)
        if not stream_ids:
            self.fail("failed to assoc new data product version with data product stream")

        # test creating a subsequent data product version which will update the data product pointers


        dpv_obj = IonObject(RT.DataProductVersion,
            name='DPV2',
            description='some new dp version')

        dpv2_id = self.client.create_data_product_version(dp_id, dpv_obj)
        log.debug( 'new dpv_id = %s', str(dpv2_id))


        #test that the links exist
        version_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasVersion, id_only=True)
        if len(version_ids) != 2:
            self.fail("data product should have two versions")

        stream_ids, _ = self.rrclient.find_objects(subject=dpv2_id, predicate=PRED.hasStream, id_only=True)
        if not stream_ids:
            self.fail("failed to assoc second data product version with a stream")

        dataset_ids, _ = self.rrclient.find_objects(subject=dpv2_id, predicate=PRED.hasDataset, id_only=True)
        if not dataset_ids:
            self.fail("failed to assoc second data product version with a dataset")

        dp_stream_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasStream, id_only=True)
        if not dp_stream_ids:
            self.fail("the data product is not assoc with a stream")



    @unittest.skip('not working')
    def test_createDataProductVersionFromSim(self):

        # ctd simulator process
        producer_definition = ProcessDefinition(name='Example Data Producer')
        producer_definition.executable = {
            'module':'ion.services.sa.test.simple_ctd_data_producer',
            'class':'SimpleCtdDataProducer'
        }
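
        # SimpleCtdDataProducer publishes simulated CTD samples on the stream id
        # supplied through the process configuration (see 'configuration' below).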

        producer_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)


        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        try:
            instDevice_id1 = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.damsclient.register_instrument(instDevice_id1)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" %ex)

        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def)

        print 'test_createDataProductVersionFromSim: new Stream Definition id = ', ctd_stream_def_id

        print 'Creating new CDM data product with a stream definition'

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)
        print 'new ctd_parsed_data_product_id = ', ctd_parsed_data_product

        self.damsclient.assign_data_product(input_resource_id=instDevice_id1, data_product_id=ctd_parsed_data_product)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)
        print 'test_createDataProductVersionFromSim: Data product streams1 = ', stream_ids
        self.parsed_stream_id = stream_ids[0]

        #-------------------------------
        # Streaming
        #-------------------------------

        # Start the ctd simulator to produce some data
        configuration = {
            'process': {
                'stream_id': self.parsed_stream_id,
            }
        }
        producer_pid = self.processdispatchclient.schedule_process(process_definition_id=producer_procdef_id, configuration=configuration)

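        # let the simulator run briefly so it can publish a few samples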
        time.sleep(2.0)

        # clean up the launched processes
        self.processdispatchclient.cancel_process(producer_pid)



        #-------------------------------
        # Create InstrumentDevice 2
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice2', description="SBE37IMDevice", serial_number="6789" )
        try:
            instDevice_id2 = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.damsclient.register_instrument(instDevice_id2)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice2: %s" %ex)

        #-------------------------------
        # Create CTD Parsed as the new version of the original data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator

        dataproductversion_obj = IonObject(RT.DataProduct,
            name='CTDParsedV2',
            description="new version" ,
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product_new_version = self.dataproductclient.create_data_product_version(ctd_parsed_data_product, dataproductversion_obj)

        print 'new ctd_parsed_data_product_version_id = ', ctd_parsed_data_product_new_version

        self.damsclient.assign_data_product(input_resource_id=instDevice_id1, data_product_id=ctd_parsed_data_product, data_product_version_id=ctd_parsed_data_product_new_version)
        #-------------------------------
        # ACTIVATE PERSISTENCE FOR DATA PRODUCT VERSIONS: NOT IMPLEMENTED YET
        #-------------------------------
        #self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product_new_version)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_new_version, PRED.hasStream, None, True)
        print 'test_createDataProductVersionFromSim: Data product streams2 = ', stream_ids
        self.parsed_stream_id2 = stream_ids[0]

        #-------------------------------
        # Streaming
        #-------------------------------

        # Start the ctd simulator to produce some data
        configuration = {
            'process': {
                'stream_id': self.parsed_stream_id2,
            }
        }
        producer_pid = self.processdispatchclient.schedule_process(process_definition_id=producer_procdef_id, configuration=configuration)

        time.sleep(2.0)

        # clean up the launched processes
        self.processdispatchclient.cancel_process(producer_pid)

class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.dataset_management = self.datasetclient
        self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)

    def test_createDataProcess(self):

        #---------------------------------------------------------------------------
        # Data Process Definition
        #---------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        # Make assertion on the newly registered data process definition
        data_process_definition = self.rrclient.read(dprocdef_id)
        self.assertEquals(data_process_definition.name, 'ctd_L0_all')
        self.assertEquals(data_process_definition.description, 'transform ctd package into three separate L0 streams')
        self.assertEquals(data_process_definition.module, 'ion.processes.data.transforms.ctd.ctd_L0_all')
        self.assertEquals(data_process_definition.class_name, 'ctd_L0_all')

        # Read the data process definition using data process management and make assertions
        dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id)
        self.assertEquals(dprocdef_obj.class_name,'ctd_L0_all')
        self.assertEquals(dprocdef_obj.module,'ion.processes.data.transforms.ctd.ctd_L0_all')

        #---------------------------------------------------------------------------
        # Create an input instrument
        #---------------------------------------------------------------------------

        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        # Register the instrument so that the data producer and stream object are created
        data_producer_id = self.damsclient.register_instrument(instrument_id)

        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id )

        # Assert that the link between the stream definition and the data process definition was done
        assocs = self.rrclient.find_associations(subject=dprocdef_id, predicate=PRED.hasInputStreamDefinition, object=ctd_stream_def_id, id_only=True)

        self.assertIsNotNone(assocs)

        #---------------------------------------------------------------------------
        # Input Data Product
        #---------------------------------------------------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        input_dp_obj = IonObject(   RT.DataProduct,
                                    name='InputDataProduct',
                                    description='some new dp',
                                    temporal_domain = tdom,
                                    spatial_domain = sdom)

        input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=ctd_stream_def_id, exchange_point='test')

        #Make assertions on the input data product created
        input_dp_obj = self.rrclient.read(input_dp_id)
        self.assertEquals(input_dp_obj.name, 'InputDataProduct')
        self.assertEquals(input_dp_obj.description, 'some new dp')

        self.damsclient.assign_data_product(instrument_id, input_dp_id)

        # Retrieve the stream via the DataProduct->Stream associations
        stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True)
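        # find_objects returns (ids, associations); a data product is expected to
        # have exactly one stream, so the first id is taken below.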

        self.in_stream_id = stream_ids[0]

        #---------------------------------------------------------------------------
        # Output Data Product
        #---------------------------------------------------------------------------

        outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(name='conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id,binding='conductivity' )

        outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(name='pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id, binding='pressure' )

        outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(name='temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id, binding='temperature' )


        self.output_products={}

        output_dp_obj = IonObject(RT.DataProduct,
            name='conductivity',
            description='transform output conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id)
        self.output_products['conductivity'] = output_dp_id_1

        output_dp_obj = IonObject(RT.DataProduct,
            name='pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id)
        self.output_products['pressure'] = output_dp_id_2

        output_dp_obj = IonObject(RT.DataProduct,
            name='temperature',
            description='transform output ',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id)
        self.output_products['temperature'] = output_dp_id_3


        #---------------------------------------------------------------------------
        # Create the data process
        #---------------------------------------------------------------------------
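        # create_data_process links the definition to its input data product and
        # to the binding -> output-product map assembled above.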
        def _create_data_process():
            dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products)
            return dproc_id

        dproc_id = _create_data_process()

        # Make assertions on the data process created
        data_process = self.dataprocessclient.read_data_process(dproc_id)

        # Assert that the data process has a process id attached
        self.assertIsNotNone(data_process.process_id)

        # Assert that the data process was assigned the input data product's subscription id as its own input_subscription_id attribute
        self.assertIsNotNone(data_process.input_subscription_id)

        output_data_product_ids = self.rrclient.find_objects(subject=dproc_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=True)
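        # Compare as sets: the resource registry does not guarantee the order of
        # the associated output products.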

        self.assertEquals(Set(output_data_product_ids[0]), Set([output_dp_id_1,output_dp_id_2,output_dp_id_3]))


    @patch.dict(CFG, {'endpoint':{'receive':{'timeout': 60}}})
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg")
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------


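        # NOTE: the hard-coded config below is immediately superseded by the
        # CFG-driven port_agent_config that follows; it is left in place only
        # to document the expected keys.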
        port_agent_config = {
            'device_addr': 'sbe37-simulator.oceanobservatories.org',
            'device_port': 4001,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'command_port': 4002,
            'data_port': 4003,
            'log_level': 5,
        }


        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()
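        # dump() flattens the domain objects into plain dicts so they can be
        # carried on the DataProduct resource below.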



        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id)

        dp_obj.name = 'ctd_raw'
        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add validation for Req L4-CI-SA-RQ-367: data processing shall notify registered data product consumers about data processing workflow life cycle events
        # todo (contd): this capability does not exist yet; ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu). A sketch of the eventual check follows.
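        # A minimal sketch, assuming a hypothetical 'DataProcessStatusEvent' type
        # published with the data process id as origin (neither is confirmed by
        # the current services); kept commented out until the capability lands:
        #
        #   from pyon.event.event import EventSubscriber
        #   lifecycle_events = []
        #   lc_sub = EventSubscriber(event_type='DataProcessStatusEvent',
        #                            callback=lambda evt, *args, **kw: lifecycle_events.append(evt))
        #   lc_sub.start()
        #   self.addCleanup(lc_sub.stop)
        #   ... create and activate the data process, then:
        #   self.assertTrue(len(lifecycle_events) >= 1)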
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------

        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
        data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        process_id = data_process.process_id
        self.addCleanup(self.process_dispatcher.cancel_process, process_id)

        #-------------------------------
        # Wait until the process launched in the create_data_process() method is actually running, before proceeding further in this test
        #-------------------------------
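        # ProcessStateGate blocks until read_process reports the requested state
        # (RUNNING here) for the given process id, or the timeout elapses.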

        gate = ProcessStateGate(self.process_dispatcher.read_process, process_id, ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The data process (%s) did not spawn in 30 seconds" % process_id)

        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: Req: L4-CI-SA-RQ-366  Data processing shall manage data topic definitions
        # todo: data topics are being handled by pub sub at the level of streams
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        #todo: check that an activate event is received (L4-CI-SA-RQ-367)
        #todo (contd): no event appears to be published when the data process is activated,
        #todo (contd): so for now we just check that the subscription is indeed activated (Swarbhanu)


        # todo: monitor process to see if it is active (sa-rq-182)
        ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        input_subscription_id = ctd_l0_all_data_process.input_subscription_id
        subs = self.rrclient.read(input_subscription_id)
        self.assertTrue(subs.activated)

        # todo: This has not yet been completed by CEI; it will probably surface through a DPMS call
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)


        #-------------------------------
        # Retrieve the extended resources for data process definition and for data process
        #-------------------------------
        extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
        self.assertEqual(1, len(extended_process_definition.data_processes))
        log.debug("test_createDataProcess: extended_process_definition  %s", str(extended_process_definition))

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEqual(1, len(extended_process.input_data_products))
        log.debug("test_createDataProcess: extended_process  %s", str(extended_process))

        ################################ Test the removal of data processes ##################################

        #-------------------------------------------------------------------
        # Test the deleting of the data process
        #-------------------------------------------------------------------

        # Before deleting, get the input streams, output streams and the subscriptions so that they can be checked after deleting
#        dp_obj_1 = self.rrclient.read(ctd_l0_all_data_process_id)
#        input_subscription_id = dp_obj_1.input_subscription_id
#        out_prods, _ = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
#        in_prods, _ = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasInputProduct, id_only=True)
#        in_streams = []
#        for in_prod in in_prods:
#            streams, _ = self.rrclient.find_objects(in_prod, PRED.hasStream, id_only=True)
#            in_streams.extend(streams)
#        out_streams = []
#        for out_prod in out_prods:
#            streams, _ = self.rrclient.find_objects(out_prod, PRED.hasStream, id_only=True)
#            out_streams.extend(streams)

        # Deleting the data process
        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)

        # Check that the data process was removed: its lcstate should be RETIRED
        dp_obj = self.rrclient.read(ctd_l0_all_data_process_id)
        self.assertEquals(dp_obj.lcstate, LCS.RETIRED)

        # Check for process defs still attached to the data process
        dpd_assn_ids = self.rrclient.find_associations(subject=ctd_l0_all_data_process_id,  predicate=PRED.hasProcessDefinition, id_only=True)
        self.assertEquals(len(dpd_assn_ids), 0)

        # Check for output data product still attached to the data process
        out_products, assocs = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
        self.assertEquals(len(out_products), 0)
        self.assertEquals(len(assocs), 0)

        # Check for input data products still attached to the data process
        inprod_associations = self.rrclient.find_associations(ctd_l0_all_data_process_id, PRED.hasInputProduct)
        self.assertEquals(len(inprod_associations), 0)


        # Check that the data process has been deactivated
        self.assertIsNone(dp_obj.input_subscription_id)

        # Read the original subscription id of the data process and check that it has been deactivated
        with self.assertRaises(NotFound):
            self.pubsubclient.read_subscription(input_subscription_id)

        #-------------------------------------------------------------------
        # Delete the data process definition
        #-------------------------------------------------------------------

        # Before deleting, get the associated process definition so that we can later check it was cleaned up as expected
        proc_def_ids, proc_def_asocs = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasProcessDefinition)
        self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # check that the data process definition has been retired
        dp_proc_def = self.rrclient.read(ctd_L0_all_dprocdef_id)
        self.assertEquals(dp_proc_def.lcstate, LCS.RETIRED)

        # Check for old associations of this data process definition
        proc_defs, proc_def_asocs = self.rrclient.find_objects(ctd_L0_all_dprocdef_id, PRED.hasProcessDefinition)
        self.assertEquals(len(proc_defs), 0)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject= ctd_L0_all_dprocdef_id, id_only=True)
        self.assertEquals(len(obj_assns), 0)

        ################################ Test the removal of data processes ##################################
        # Try force delete... This should simply delete the associations and the data process object
        # from the resource registry

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)
        
        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process definition
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)

        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)


    def test_transform_function_crd(self):
        tf = TransformFunction(name='simple', module='pyon.ion.process', cls='SimpleProcess')

        tf_id = self.dataprocessclient.create_transform_function(tf)
        self.assertTrue(tf_id)

        tf2_id = self.dataprocessclient.create_transform_function(tf)
        self.assertEquals(tf_id, tf2_id)

        tf_obj = self.dataprocessclient.read_transform_function(tf_id)
        self.assertEquals([tf.name, tf.module, tf.cls, tf.function_type], [tf_obj.name, tf_obj.module, tf_obj.cls, tf_obj.function_type])

        tf.module = 'dev.null'
        self.assertRaises(BadRequest, self.dataprocessclient.create_transform_function, tf)

        self.dataprocessclient.delete_transform_function(tf_id)
        self.assertRaises(NotFound, self.dataprocessclient.read_transform_function, tf_id)
Code Example #13
class TestOmsLaunch(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        # Use the network definition provided by RSN OMS directly.
        rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
        self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
        # get serialized version for the configuration:
        self._network_definition_ser = NetworkUtil.serialize_network_definition(
            self._network_definition)
        if log.isEnabledFor(logging.DEBUG):
            log.debug("NetworkDefinition serialization:\n%s",
                      self._network_definition_ser)

        self.platformModel_id = None

        self.all_platforms = {}
        self.agent_streamconfig_map = {}

        self._async_data_result = AsyncResult()
        self._data_subscribers = []
        self._samples_received = []
        self.addCleanup(self._stop_data_subscribers)

        self._async_event_result = AsyncResult()
        self._event_subscribers = []
        self._events_received = []
        self.addCleanup(self._stop_event_subscribers)
        self._start_event_subscriber()

        self._set_up_DataProduct_obj()
        self._set_up_PlatformModel_obj()

    def _set_up_DataProduct_obj(self):
        # Create data product object to be used for each of the platform log streams
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        self.pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'platform_eng_parsed', id_only=True)
        self.platform_eng_stream_def_id = self.pubsubcli.create_stream_definition(
            name='platform_eng', parameter_dictionary_id=self.pdict_id)
        self.dp_obj = IonObject(RT.DataProduct,
                                name='platform_eng data',
                                description='platform_eng test',
                                temporal_domain=tdom,
                                spatial_domain=sdom)

    def _set_up_PlatformModel_obj(self):
        # Create PlatformModel
        platformModel_obj = IonObject(RT.PlatformModel,
                                      name='RSNPlatformModel',
                                      description="RSNPlatformModel")
        try:
            self.platformModel_id = self.imsclient.create_platform_model(
                platformModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new PLatformModel: %s" % ex)
        log.debug('new PlatformModel id = %s', self.platformModel_id)

    def _traverse(self, pnode, platform_id, parent_platform_objs=None):
        """
        Recursive routine that repeatedly calls _prepare_platform to build
        the object dictionary for each platform.

        @param pnode PlatformNode
        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
                        platform, if any.

        @retval the dict returned by _prepare_platform at this level.
        """

        log.info("Starting _traverse for %r", platform_id)

        plat_objs = self._prepare_platform(pnode, platform_id,
                                           parent_platform_objs)

        self.all_platforms[platform_id] = plat_objs

        # now, traverse the children:
        for sub_pnode in pnode.subplatforms.itervalues():
            subplatform_id = sub_pnode.platform_id
            self._traverse(sub_pnode, subplatform_id, plat_objs)

        return plat_objs

    def _prepare_platform(self, pnode, platform_id, parent_platform_objs):
        """
        This routine generalizes the manual construction originally done in
        test_oms_launch.py. It is called by the recursive _traverse method so
        all platforms starting from a given base platform are prepared.

        Note: For simplicity in this test, sites are organized in the same
        hierarchical way as the platforms themselves.

        @param pnode PlatformNode
        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
                        platform, if any.

        @retval a dict of associated objects similar to those in
                test_oms_launch
        """

        site__obj = IonObject(RT.PlatformSite,
                              name='%s_PlatformSite' % platform_id,
                              description='%s_PlatformSite platform site' %
                              platform_id)

        site_id = self.omsclient.create_platform_site(site__obj)

        if parent_platform_objs:
            # establish hasSite association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['site_id'],
                predicate=PRED.hasSite,
                object=site_id)

        # prepare platform attributes and ports:
        monitor_attribute_objs, monitor_attribute_dicts = self._prepare_platform_attributes(
            pnode, platform_id)

        port_objs, port_dicts = self._prepare_platform_ports(
            pnode, platform_id)

        device__obj = IonObject(
            RT.PlatformDevice,
            name='%s_PlatformDevice' % platform_id,
            description='%s_PlatformDevice platform device' % platform_id,
            #                        ports=port_objs,
            #                        platform_monitor_attributes = monitor_attribute_objs
        )

        device__dict = dict(
            ports=port_dicts,
            platform_monitor_attributes=monitor_attribute_dicts)

        self.device_id = self.imsclient.create_platform_device(device__obj)

        self.imsclient.assign_platform_model_to_platform_device(
            self.platformModel_id, self.device_id)
        self.rrclient.create_association(subject=site_id,
                                         predicate=PRED.hasDevice,
                                         object=self.device_id)
        self.damsclient.register_instrument(instrument_id=self.device_id)

        if parent_platform_objs:
            # establish hasDevice association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['device_id'],
                predicate=PRED.hasDevice,
                object=self.device_id)

        agent__obj = IonObject(RT.PlatformAgent,
                               name='%s_PlatformAgent' % platform_id,
                               description='%s_PlatformAgent platform agent' %
                               platform_id)

        agent_id = self.imsclient.create_platform_agent(agent__obj)

        if parent_platform_objs:
            # add this platform_id to parent's children:
            parent_platform_objs['children'].append(platform_id)

        self.imsclient.assign_platform_model_to_platform_agent(
            self.platformModel_id, agent_id)

        #        agent_instance_obj = IonObject(RT.PlatformAgentInstance,
        #                                name='%s_PlatformAgentInstance' % platform_id,
        #                                description="%s_PlatformAgentInstance" % platform_id)
        #
        #        agent_instance_id = self.imsclient.create_platform_agent_instance(
        #                            agent_instance_obj, agent_id, device_id)

        plat_objs = {
            'platform_id': platform_id,
            'site__obj': site__obj,
            'site_id': site_id,
            'device__obj': device__obj,
            'device_id': self.device_id,
            'agent__obj': agent__obj,
            'agent_id': agent_id,
            #            'agent_instance_obj': agent_instance_obj,
            #            'agent_instance_id':  agent_instance_id,
            'children': []
        }

        log.info("plat_objs for platform_id %r = %s", platform_id,
                 str(plat_objs))

        stream_config = self._create_stream_config(plat_objs)
        self.agent_streamconfig_map[platform_id] = stream_config
        #        self.agent_streamconfig_map[platform_id] = None
        #        self._start_data_subscriber(agent_instance_id, stream_config)

        return plat_objs

    def _prepare_platform_attributes(self, pnode, platform_id):
        """
        Returns the list of PlatformMonitorAttributes objects corresponding to
        the attributes associated to the given platform.
        """
        # TODO complete the clean-up of this method
        ret_infos = dict((n, a.defn) for (n, a) in pnode.attrs.iteritems())

        monitor_attribute_objs = []
        monitor_attribute_dicts = []
        for attrName, attrDfn in ret_infos.iteritems():
            log.debug("platform_id=%r: preparing attribute=%r", platform_id,
                      attrName)

            monitor_rate = attrDfn['monitorCycleSeconds']
            units = attrDfn['units']

            plat_attr_obj = IonObject(OT.PlatformMonitorAttributes,
                                      id=attrName,
                                      monitor_rate=monitor_rate,
                                      units=units)

            plat_attr_dict = dict(id=attrName,
                                  monitor_rate=monitor_rate,
                                  units=units)

            monitor_attribute_objs.append(plat_attr_obj)
            monitor_attribute_dicts.append(plat_attr_dict)

        return monitor_attribute_objs, monitor_attribute_dicts

    def _prepare_platform_ports(self, pnode, platform_id):
        """
        Returns the list of PlatformPort objects corresponding to the ports
        associated to the given platform.
        """
        # TODO complete the clean-up of this method

        port_objs = []
        port_dicts = []
        for port_id, network in pnode.ports.iteritems():
            log.debug("platform_id=%r: preparing port=%r network=%s",
                      platform_id, port_id, network)

            #
            # Note: the name "IP" address has been changed to "network" address
            # in the CI-OMS interface spec.
            #
            plat_port_obj = IonObject(OT.PlatformPort,
                                      port_id=port_id,
                                      ip_address=network)

            plat_port_dict = dict(port_id=port_id, network=network)

            port_objs.append(plat_port_obj)

            port_dicts.append(plat_port_dict)

        return port_objs, port_dicts

    def _create_stream_config(self, plat_objs):

        platform_id = plat_objs['platform_id']
        device_id = plat_objs['device_id']

        #create the log data product
        self.dp_obj.name = '%s platform_eng data' % platform_id
        self.data_product_id = self.dpclient.create_data_product(
            data_product=self.dp_obj,
            stream_definition_id=self.platform_eng_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=device_id,
            data_product_id=self.data_product_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(self.data_product_id,
                                                   PRED.hasStream, None, True)

        stream_config = self._build_stream_config(stream_ids[0])
        return stream_config

    def _build_stream_config(self, stream_id=''):

        platform_eng_dictionary = DatasetManagementService.get_parameter_dictionary_by_name(
            'platform_eng_parsed')

        #get the streamroute object from pubsub by passing the stream_id
        stream_def_ids, _ = self.rrclient.find_objects(
            stream_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)

        stream_route = self.pubsubcli.read_stream_route(stream_id=stream_id)
        stream_config = {
            'routing_key': stream_route.routing_key,
            'stream_id': stream_id,
            'stream_definition_ref': stream_def_ids[0],
            'exchange_point': stream_route.exchange_point,
            'parameter_dictionary': platform_eng_dictionary.dump()
        }

        return stream_config
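    # Note: the stream_config dicts built here are handed to each platform agent
    # via agent_streamconfig_map / PLATFORM_CONFIG in _set_platform_agent_instances.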

    def _set_platform_agent_instances(self):
        """
        Once most of the objs/defs associated with all platforms are in
        place, this method creates and associates the PlatformAgentInstance
        elements.
        """

        self.platform_configs = {}
        for platform_id, plat_objs in self.all_platforms.iteritems():

            PLATFORM_CONFIG = {
                'platform_id': platform_id,
                'agent_streamconfig_map': None,  #self.agent_streamconfig_map,
                'driver_config': DVR_CONFIG,
                'network_definition': self._network_definition_ser
            }

            self.platform_configs[platform_id] = {
                'platform_id': platform_id,
                'agent_streamconfig_map': self.agent_streamconfig_map,
                'driver_config': DVR_CONFIG,
                'network_definition': self._network_definition_ser
            }

            agent_config = {
                'platform_config': PLATFORM_CONFIG,
            }

            self.stream_id = self.agent_streamconfig_map[platform_id][
                'stream_id']

            #            import pprint
            #            print '============== platform id within unit test: %s ===========' % platform_id
            #            pprint.pprint(agent_config)
            #agent_config['platform_config']['agent_streamconfig_map'] = None

            agent_instance_obj = IonObject(
                RT.PlatformAgentInstance,
                name='%s_PlatformAgentInstance' % platform_id,
                description="%s_PlatformAgentInstance" % platform_id,
                agent_config=agent_config)

            agent_id = plat_objs['agent_id']
            device_id = plat_objs['device_id']
            agent_instance_id = self.imsclient.create_platform_agent_instance(
                agent_instance_obj, agent_id, device_id)

            plat_objs['agent_instance_obj'] = agent_instance_obj
            plat_objs['agent_instance_id'] = agent_instance_id

            stream_config = self.agent_streamconfig_map[platform_id]
            self._start_data_subscriber(agent_instance_id, stream_config)

    def _start_data_subscriber(self, stream_name, stream_config):
        """
        Starts data subscriber for the given stream_name and stream_config
        """
        def consume_data(message, stream_route, stream_id):
            # A callback for processing subscribed-to data.
            log.info('Subscriber received data message: %s.', str(message))
            self._samples_received.append(message)
            self._async_data_result.set()

        log.info('_start_data_subscriber stream_name=%r', stream_name)

        stream_id = stream_config['stream_id']

        # Create subscription for the stream
        exchange_name = '%s_queue' % stream_name
        self.container.ex_manager.create_xn_queue(exchange_name).purge()
        sub = StandaloneStreamSubscriber(exchange_name, consume_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = self.pubsubcli.create_subscription(name=exchange_name,
                                                    stream_ids=[stream_id])
        self.pubsubcli.activate_subscription(sub_id)
        sub.subscription_id = sub_id

    def _stop_data_subscribers(self):
        """
        Stop the data subscribers on cleanup.
        """
        try:
            for sub in self._data_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        self.pubsubcli.deactivate_subscription(
                            sub.subscription_id)
                    except:
                        pass
                    self.pubsubcli.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._data_subscribers = []

    def _start_event_subscriber(self,
                                event_type="DeviceEvent",
                                sub_type="platform_event"):
        """
        Starts event subscriber for events of given event_type ("DeviceEvent"
        by default) and given sub_type ("platform_event" by default).
        """
        def consume_event(evt, *args, **kwargs):
            # A callback for consuming events.
            log.info('Event subscriber received evt: %s.', str(evt))
            self._events_received.append(evt)
            self._async_event_result.set(evt)

        sub = EventSubscriber(event_type=event_type,
                              sub_type=sub_type,
                              callback=consume_event)

        sub.start()
        log.info("registered event subscriber for event_type=%r, sub_type=%r",
                 event_type, sub_type)

        self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)

    def _stop_event_subscribers(self):
        """
        Stops the event subscribers on cleanup.
        """
        try:
            for sub in self._event_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        self.pubsubcli.deactivate_subscription(
                            sub.subscription_id)
                    except:
                        pass
                    self.pubsubcli.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._event_subscribers = []

    @skip("IMS does't net implement topology")
    def test_hierarchy(self):
        self._create_launch_verify(BASE_PLATFORM_ID)

    @skip("Needs alignment with recent IMS changes")
    def test_single_platform(self):
        self._create_launch_verify('LJ01D')

    def _create_launch_verify(self, base_platform_id):
        # and trigger the traversal of the branch rooted at that base platform
        # to create corresponding ION objects and configuration dictionaries:

        pnode = self._network_definition.pnodes[base_platform_id]
        base_platform_objs = self._traverse(pnode, base_platform_id)

        # now that most of the topology information is there, add the
        # PlatformAgentInstance elements
        self._set_platform_agent_instances()

        base_platform_config = self.platform_configs[base_platform_id]

        log.info("base_platform_id = %r", base_platform_id)

        #-------------------------------------------------------------------------------------
        # Create Data Process Definition and Data Process for the eng stream monitor process
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='DemoStreamAlertTransform',
            description='For testing EventTriggeredTransform_B',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='DemoStreamAlertTransform')
        self.platform_dprocdef_id = self.dataprocessclient.create_data_process_definition(
            dpd_obj)

        # NOTE: there should be no stream definition required here
        platform_streamdef_id = self.pubsubcli.create_stream_definition(
            name='platform_eng_parsed', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            platform_streamdef_id, self.platform_dprocdef_id, binding='output')

        config = {
            'process': {
                'timer_interval': 5,
                'queue_name': 'a_queue',
                'variable_name': 'input_voltage',
                'time_field_name': 'preferred_timestamp',
                'valid_values': [-100, 100],
                'timer_origin': 'Interval Timer'
            }
        }
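        # The 'process' config parameterizes DemoStreamAlertTransform: it watches
        # 'input_voltage' in samples from 'a_queue' and treats values outside
        # [-100, 100] as alert conditions (a reading of the config above, not
        # verified against the transform's implementation).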

        platform_data_process_id = self.dataprocessclient.create_data_process(
            self.platform_dprocdef_id, [self.data_product_id], {}, config)
        self.dataprocessclient.activate_data_process(platform_data_process_id)
        self.addCleanup(self.dataprocessclient.delete_data_process,
                        platform_data_process_id)

        #-------------------------------
        # Launch Base Platform AgentInstance, connect to the resource agent client
        #-------------------------------

        agent_instance_id = base_platform_objs['agent_instance_id']
        log.debug(
            "about to call imsclient.start_platform_agent_instance with id=%s",
            agent_instance_id)
        pid = self.imsclient.start_platform_agent_instance(
            platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        #wait for start
        instance_obj = self.imsclient.read_platform_agent_instance(
            agent_instance_id)
        gate = ProcessStateGate(self.processdispatchclient.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(90),
            "The platform agent instance did not spawn in 90 seconds")

        agent_instance_obj = self.imsclient.read_platform_agent_instance(
            agent_instance_id)
        log.debug(
            'test_oms_create_and_launch: Platform agent instance obj: %s',
            str(agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._pa_client = ResourceAgentClient(
            'paclient',
            name=agent_instance_obj.agent_process_id,
            process=FakeProcess())
        log.debug(" test_oms_create_and_launch:: got pa client %s",
                  str(self._pa_client))

        log.debug("base_platform_config =\n%s", base_platform_config)

        # ping_agent can be issued before INITIALIZE
        retval = self._pa_client.ping_agent(timeout=TIMEOUT)
        log.debug('Base Platform ping_agent = %s', str(retval))

        # issue INITIALIZE command to the base platform, which will launch the
        # creation of the whole platform hierarchy rooted at base_platform_config['platform_id']
        #        cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE, kwargs=dict(plat_config=base_platform_config))
        cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform INITIALIZE = %s', str(retval))

        # GO_ACTIVE
        cmd = AgentCommand(command=PlatformAgentEvent.GO_ACTIVE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform GO_ACTIVE = %s', str(retval))

        # RUN:
        cmd = AgentCommand(command=PlatformAgentEvent.RUN)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform RUN = %s', str(retval))

        # START_MONITORING:
        cmd = AgentCommand(command=PlatformAgentEvent.START_MONITORING)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform START_MONITORING = %s', str(retval))

        # wait for data sample
        # just wait for at least one -- see consume_data above
        log.info("waiting for reception of a data sample...")
        self._async_data_result.get(timeout=DATA_TIMEOUT)
        self.assertTrue(len(self._samples_received) >= 1)

        log.info("waiting a bit more for reception of more data samples...")
        sleep(15)
        log.info("Got data samples: %d", len(self._samples_received))

        # wait for event
        # just wait for at least one event -- see consume_event above
        log.info("waiting for reception of an event...")
        self._async_event_result.get(timeout=EVENT_TIMEOUT)
        log.info("Received events: %s", len(self._events_received))

        #get the extended platform which will include platform aggregate status fields
        extended_platform = self.imsclient.get_platform_device_extension(
            self.device_id)
        #        log.debug( 'test_single_platform   extended_platform: %s', str(extended_platform) )
        #        log.debug( 'test_single_platform   power_status_roll_up: %s', str(extended_platform.computed.power_status_roll_up.value) )
        #        log.debug( 'test_single_platform   comms_status_roll_up: %s', str(extended_platform.computed.communications_status_roll_up.value) )

        # STOP_MONITORING:
        cmd = AgentCommand(command=PlatformAgentEvent.STOP_MONITORING)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform STOP_MONITORING = %s', str(retval))

        # GO_INACTIVE
        cmd = AgentCommand(command=PlatformAgentEvent.GO_INACTIVE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform GO_INACTIVE = %s', str(retval))

        # RESET: Resets the base platform agent, which includes termination of
        # its sub-platforms processes:
        cmd = AgentCommand(command=PlatformAgentEvent.RESET)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform RESET = %s', str(retval))

        #-------------------------------
        # Stop Base Platform AgentInstance
        #-------------------------------
        self.imsclient.stop_platform_agent_instance(
            platform_agent_instance_id=agent_instance_id)
Code Example #14
class TestOmsLaunch(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()


        # Use the network definition provided by RSN OMS directly.
        rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
        self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
        # get serialized version for the configuration:
        self._network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
        if log.isEnabledFor(logging.DEBUG):
            log.debug("NetworkDefinition serialization:\n%s", self._network_definition_ser)



        self.platformModel_id = None

        self.all_platforms = {}
        self.agent_streamconfig_map = {}

        self._async_data_result = AsyncResult()
        self._data_subscribers = []
        self._samples_received = []
        self.addCleanup(self._stop_data_subscribers)

        self._async_event_result = AsyncResult()
        self._event_subscribers = []
        self._events_received = []
        self.addCleanup(self._stop_event_subscribers)
        self._start_event_subscriber()

        self._set_up_DataProduct_obj()
        self._set_up_PlatformModel_obj()

    def _set_up_DataProduct_obj(self):
        # Create data product object to be used for each of the platform log streams
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        self.pdict_id = self.dataset_management.read_parameter_dictionary_by_name('platform_eng_parsed', id_only=True)
        self.platform_eng_stream_def_id = self.pubsubcli.create_stream_definition(
            name='platform_eng', parameter_dictionary_id=self.pdict_id)
        self.dp_obj = IonObject(RT.DataProduct,
            name='platform_eng data',
            description='platform_eng test',
            temporal_domain = tdom,
            spatial_domain = sdom)

    def _set_up_PlatformModel_obj(self):
        # Create PlatformModel
        platformModel_obj = IonObject(RT.PlatformModel,
            name='RSNPlatformModel',
            description="RSNPlatformModel")
        try:
            self.platformModel_id = self.imsclient.create_platform_model(platformModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new PlatformModel: %s" % ex)
        log.debug('new PlatformModel id = %s', self.platformModel_id)

    def _traverse(self, pnode, platform_id, parent_platform_objs=None):
        """
        Recursive routine that repeatedly calls _prepare_platform to build
        the object dictionary for each platform.

        @param pnode PlatformNode
        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
                        platform, if any.

        @retval the dict returned by _prepare_platform at this level.
        """

        log.info("Starting _traverse for %r", platform_id)

        plat_objs = self._prepare_platform(pnode, platform_id, parent_platform_objs)

        self.all_platforms[platform_id] = plat_objs

        # now, traverse the children:
        for sub_pnode in pnode.subplatforms.itervalues():
            subplatform_id = sub_pnode.platform_id
            self._traverse(sub_pnode, subplatform_id, plat_objs)

        return plat_objs
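
    # Illustrative note (added; platform ids are hypothetical): for a small
    # network Node1A -> {MJ01A, MJ01B}, calling _traverse(pnode, 'Node1A')
    # leaves self.all_platforms with entries for all three platforms, and
    # 'MJ01A'/'MJ01B' appear in the 'children' list of Node1A's plat_objs.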

    def _prepare_platform(self, pnode, platform_id, parent_platform_objs):
        """
        This routine generalizes the manual construction originally done in
        test_oms_launch.py. It is called by the recursive _traverse method so
        all platforms starting from a given base platform are prepared.

        Note: For simplicity in this test, sites are organized in the same
        hierarchical way as the platforms themselves.

        @param pnode PlatformNode
        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
                        platform, if any.

        @retval a dict of associated objects similar to those in
                test_oms_launch
        """

        site__obj = IonObject(RT.PlatformSite,
            name='%s_PlatformSite' % platform_id,
            description='%s_PlatformSite platform site' % platform_id)

        site_id = self.omsclient.create_platform_site(site__obj)

        if parent_platform_objs:
            # establish hasSite association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['site_id'],
                predicate=PRED.hasSite,
                object=site_id)

        # prepare platform attributes and ports:
        monitor_attribute_objs, monitor_attribute_dicts = self._prepare_platform_attributes(pnode, platform_id)

        port_objs, port_dicts = self._prepare_platform_ports(pnode, platform_id)

        device__obj = IonObject(RT.PlatformDevice,
            name='%s_PlatformDevice' % platform_id,
            description='%s_PlatformDevice platform device' % platform_id,
            #                        ports=port_objs,
            #                        platform_monitor_attributes = monitor_attribute_objs
        )

        device__dict = dict(ports=port_dicts,
            platform_monitor_attributes=monitor_attribute_dicts)

        self.device_id = self.imsclient.create_platform_device(device__obj)

        self.imsclient.assign_platform_model_to_platform_device(self.platformModel_id, self.device_id)
        self.rrclient.create_association(subject=site_id, predicate=PRED.hasDevice, object=self.device_id)
        self.damsclient.register_instrument(instrument_id=self.device_id)


        if parent_platform_objs:
            # establish hasDevice association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['device_id'],
                predicate=PRED.hasDevice,
                object=self.device_id)

        agent__obj = IonObject(RT.PlatformAgent,
            name='%s_PlatformAgent' % platform_id,
            description='%s_PlatformAgent platform agent' % platform_id)

        agent_id = self.imsclient.create_platform_agent(agent__obj)

        if parent_platform_objs:
            # add this platform_id to parent's children:
            parent_platform_objs['children'].append(platform_id)


        self.imsclient.assign_platform_model_to_platform_agent(self.platformModel_id, agent_id)

        #        agent_instance_obj = IonObject(RT.PlatformAgentInstance,
        #                                name='%s_PlatformAgentInstance' % platform_id,
        #                                description="%s_PlatformAgentInstance" % platform_id)
        #
        #        agent_instance_id = self.imsclient.create_platform_agent_instance(
        #                            agent_instance_obj, agent_id, device_id)

        plat_objs = {
            'platform_id':        platform_id,
            'site__obj':          site__obj,
            'site_id':            site_id,
            'device__obj':        device__obj,
            'device_id':          self.device_id,
            'agent__obj':         agent__obj,
            'agent_id':           agent_id,
            #            'agent_instance_obj': agent_instance_obj,
            #            'agent_instance_id':  agent_instance_id,
            'children':           []
        }

        log.info("plat_objs for platform_id %r = %s", platform_id, str(plat_objs))

        stream_config = self._create_stream_config(plat_objs)
        self.agent_streamconfig_map[platform_id] = stream_config
        #        self.agent_streamconfig_map[platform_id] = None
        #        self._start_data_subscriber(agent_instance_id, stream_config)

        return plat_objs

    def _prepare_platform_attributes(self, pnode, platform_id):
        """
        Returns the lists of PlatformMonitorAttributes objects and equivalent
        dicts corresponding to the attributes associated to the given platform.
        """
        # TODO complete the clean-up of this method
        ret_infos = dict((n, a.defn) for (n, a) in pnode.attrs.iteritems())

        monitor_attribute_objs = []
        monitor_attribute_dicts = []
        for attrName, attrDfn in ret_infos.iteritems():
            log.debug("platform_id=%r: preparing attribute=%r", platform_id, attrName)

            monitor_rate = attrDfn['monitorCycleSeconds']
            units =        attrDfn['units']

            plat_attr_obj = IonObject(OT.PlatformMonitorAttributes,
                id=attrName,
                monitor_rate=monitor_rate,
                units=units)

            plat_attr_dict = dict(id=attrName,
                monitor_rate=monitor_rate,
                units=units)

            monitor_attribute_objs.append(plat_attr_obj)
            monitor_attribute_dicts.append(plat_attr_dict)

        return monitor_attribute_objs, monitor_attribute_dicts
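
    # Worked example (added; values are hypothetical): an attrs entry named
    # 'input_voltage' whose defn is {'monitorCycleSeconds': 5, 'units': 'V'}
    # yields PlatformMonitorAttributes(id='input_voltage', monitor_rate=5,
    # units='V') plus the parallel plain-dict form used in agent configs.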

    def _prepare_platform_ports(self, pnode, platform_id):
        """
        Returns the lists of PlatformPort objects and equivalent dicts
        corresponding to the ports associated to the given platform.
        """
        # TODO complete the clean-up of this method

        port_objs = []
        port_dicts = []
        for port_id, network in pnode.ports.iteritems():
            log.debug("platform_id=%r: preparing port=%r network=%s",
                      platform_id, port_id, network)

            #
            # Note: the name "IP" address has been changed to "network" address
            # in the CI-OMS interface spec.
            #
            plat_port_obj = IonObject(OT.PlatformPort,
                                      port_id=port_id,
                                      ip_address=network)

            plat_port_dict = dict(port_id=port_id,
                                  network=network)

            port_objs.append(plat_port_obj)

            port_dicts.append(plat_port_dict)

        return port_objs, port_dicts
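
    # Worked example (added; values are hypothetical): a ports entry
    # ('port1', '10.0.0.5') becomes PlatformPort(port_id='port1',
    # ip_address='10.0.0.5'), while the dict form uses the newer key name
    # 'network' per the CI-OMS spec note above.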

    def _create_stream_config(self, plat_objs):

        platform_id = plat_objs['platform_id']
        device_id =   plat_objs['device_id']


        #create the log data product
        self.dp_obj.name = '%s platform_eng data' % platform_id
        self.data_product_id = self.dpclient.create_data_product(data_product=self.dp_obj, stream_definition_id=self.platform_eng_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=device_id, data_product_id=self.data_product_id)  # this platform's device, not the last one created
        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(self.data_product_id, PRED.hasStream, None, True)

        stream_config = self._build_stream_config(stream_ids[0])
        return stream_config

    def _build_stream_config(self, stream_id=''):

        platform_eng_dictionary = DatasetManagementService.get_parameter_dictionary_by_name('platform_eng_parsed')

        #get the streamroute object from pubsub by passing the stream_id
        stream_def_ids, _ = self.rrclient.find_objects(stream_id,
            PRED.hasStreamDefinition,
            RT.StreamDefinition,
            True)


        stream_route = self.pubsubcli.read_stream_route(stream_id=stream_id)
        stream_config = {'routing_key' : stream_route.routing_key,
                         'stream_id' : stream_id,
                         'stream_definition_ref' : stream_def_ids[0],
                         'exchange_point' : stream_route.exchange_point,
                         'parameter_dictionary':platform_eng_dictionary.dump()}

        return stream_config
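
    # Hedged sketch (added; not part of the original test): a minimal check of
    # the stream_config dict built above. The method name _assert_stream_config
    # is hypothetical.
    def _assert_stream_config(self, stream_config):
        """Verify the keys a platform agent expects in its stream config."""
        for key in ('routing_key', 'stream_id', 'stream_definition_ref',
                    'exchange_point', 'parameter_dictionary'):
            self.assertIn(key, stream_config)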

    def _set_platform_agent_instances(self):
        """
        Once most of the objs/defs associated with all platforms are in
        place, this method creates and associates the PlatformAgentInstance
        elements.
        """

        self.platform_configs = {}
        for platform_id, plat_objs in self.all_platforms.iteritems():

            PLATFORM_CONFIG  = {
                'platform_id':             platform_id,

                'agent_streamconfig_map':  None, #self.agent_streamconfig_map,

                'driver_config':           DVR_CONFIG,

                'network_definition' :     self._network_definition_ser
                }

            self.platform_configs[platform_id] = {
                'platform_id':             platform_id,

                'agent_streamconfig_map':  self.agent_streamconfig_map,

                'driver_config':           DVR_CONFIG,

                'network_definition' :     self._network_definition_ser
                }

            agent_config = {
                'platform_config': PLATFORM_CONFIG,
                }

            self.stream_id = self.agent_streamconfig_map[platform_id]['stream_id']

            #            import pprint
            #            print '============== platform id within unit test: %s ===========' % platform_id
            #            pprint.pprint(agent_config)
            #agent_config['platform_config']['agent_streamconfig_map'] = None

            agent_instance_obj = IonObject(RT.PlatformAgentInstance,
                name='%s_PlatformAgentInstance' % platform_id,
                description="%s_PlatformAgentInstance" % platform_id,
                agent_config=agent_config)

            agent_id = plat_objs['agent_id']
            device_id = plat_objs['device_id']
            agent_instance_id = self.imsclient.create_platform_agent_instance(
                agent_instance_obj, agent_id, device_id)  # this platform's device

            plat_objs['agent_instance_obj'] = agent_instance_obj
            plat_objs['agent_instance_id']  = agent_instance_id


            stream_config = self.agent_streamconfig_map[platform_id]
            self._start_data_subscriber(agent_instance_id, stream_config)


    def _start_data_subscriber(self, stream_name, stream_config):
        """
        Starts data subscriber for the given stream_name and stream_config
        """

        def consume_data(message, stream_route, stream_id):
            # A callback for processing subscribed-to data.
            log.info('Subscriber received data message: %s.', str(message))
            self._samples_received.append(message)
            self._async_data_result.set()

        log.info('_start_data_subscriber stream_name=%r', stream_name)

        stream_id = stream_config['stream_id']  # per-platform stream, not the last one stored on self

        # Create subscription for the stream
        exchange_name = '%s_queue' % stream_name
        self.container.ex_manager.create_xn_queue(exchange_name).purge()
        sub = StandaloneStreamSubscriber(exchange_name, consume_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = self.pubsubcli.create_subscription(name=exchange_name, stream_ids=[stream_id])
        self.pubsubcli.activate_subscription(sub_id)
        sub.subscription_id = sub_id

    def _stop_data_subscribers(self):
        """
        Stop the data subscribers on cleanup.
        """
        try:
            for sub in self._data_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        self.pubsubcli.deactivate_subscription(sub.subscription_id)
                    except:
                        pass
                    self.pubsubcli.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._data_subscribers = []

    def _start_event_subscriber(self, event_type="DeviceEvent", sub_type="platform_event"):
        """
        Starts event subscriber for events of given event_type ("DeviceEvent"
        by default) and given sub_type ("platform_event" by default).
        """

        def consume_event(evt, *args, **kwargs):
            # A callback for consuming events.
            log.info('Event subscriber received evt: %s.', str(evt))
            self._events_received.append(evt)
            self._async_event_result.set(evt)

        sub = EventSubscriber(event_type=event_type,
            sub_type=sub_type,
            callback=consume_event)

        sub.start()
        log.info("registered event subscriber for event_type=%r, sub_type=%r",
            event_type, sub_type)

        self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)

    def _stop_event_subscribers(self):
        """
        Stops the event subscribers on cleanup.
        """
        try:
            for sub in self._event_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        self.pubsubcli.deactivate_subscription(sub.subscription_id)
                    except:
                        pass
                    self.pubsubcli.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._event_subscribers = []

    @skip("IMS does't net implement topology")
    def test_hierarchy(self):
        self._create_launch_verify(BASE_PLATFORM_ID)

    @skip("Needs alignment with recent IMS changes")
    def test_single_platform(self):
        self._create_launch_verify('LJ01D')

    def _create_launch_verify(self, base_platform_id):
        # Trigger the traversal of the branch rooted at the given base platform
        # to create corresponding ION objects and configuration dictionaries:

        pnode = self._network_definition.pnodes[base_platform_id]
        base_platform_objs = self._traverse(pnode, base_platform_id)

        # now that most of the topology information is there, add the
        # PlatformAgentInstance elements
        self._set_platform_agent_instances()

        base_platform_config = self.platform_configs[base_platform_id]

        log.info("base_platform_id = %r", base_platform_id)


        #-------------------------------------------------------------------------------------
        # Create Data Process Definition and Data Process for the eng stream monitor process
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='DemoStreamAlertTransform',
            description='For testing EventTriggeredTransform_B',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='DemoStreamAlertTransform')
        self.platform_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        # NOTE: there should be no stream definition required here.
        platform_streamdef_id = self.pubsubcli.create_stream_definition(name='platform_eng_parsed', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(platform_streamdef_id, self.platform_dprocdef_id, binding='output')

        config = {
            'process':{
                'timer_interval': 5,
                'queue_name': 'a_queue',
                'variable_name': 'input_voltage',
                'time_field_name': 'preferred_timestamp',
                'valid_values': [-100, 100],
                'timer_origin': 'Interval Timer'
            }
        }
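
        # Note (added): this config parameterizes the DemoStreamAlertTransform
        # defined above: it reads 'input_voltage' samples from 'a_queue' on a
        # 5-second interval timer, and values outside [-100, 100] are expected
        # to trigger alert events.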


        platform_data_process_id = self.dataprocessclient.create_data_process(self.platform_dprocdef_id, [self.data_product_id], {}, config)
        self.dataprocessclient.activate_data_process(platform_data_process_id)
        self.addCleanup(self.dataprocessclient.delete_data_process, platform_data_process_id)

        #-------------------------------
        # Launch Base Platform AgentInstance, connect to the resource agent client
        #-------------------------------

        agent_instance_id = base_platform_objs['agent_instance_id']
        log.debug("about to call imsclient.start_platform_agent_instance with id=%s", agent_instance_id)
        pid = self.imsclient.start_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        #wait for start
        instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
        gate = ProcessStateGate(self.processdispatchclient.read_process,
            instance_obj.agent_process_id,
            ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(90), "The platform agent instance did not spawn in 90 seconds")

        agent_instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
        log.debug('test_oms_create_and_launch: Platform agent instance obj: %s', str(agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._pa_client = ResourceAgentClient('paclient', name=agent_instance_obj.agent_process_id,  process=FakeProcess())
        log.debug(" test_oms_create_and_launch:: got pa client %s", str(self._pa_client))

        log.debug("base_platform_config =\n%s", base_platform_config)

        # ping_agent can be issued before INITIALIZE
        retval = self._pa_client.ping_agent(timeout=TIMEOUT)
        log.debug('Base Platform ping_agent = %s', str(retval))

        # issue INITIALIZE command to the base platform, which will launch the
        # creation of the whole platform hierarchy rooted at base_platform_config['platform_id']
        #        cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE, kwargs=dict(plat_config=base_platform_config))
        cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform INITIALIZE = %s', str(retval))

        # GO_ACTIVE
        cmd = AgentCommand(command=PlatformAgentEvent.GO_ACTIVE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform GO_ACTIVE = %s', str(retval))

        # RUN:
        cmd = AgentCommand(command=PlatformAgentEvent.RUN)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform RUN = %s', str(retval))

        # START_MONITORING:
        cmd = AgentCommand(command=PlatformAgentEvent.START_MONITORING)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform START_MONITORING = %s', str(retval))

        # wait for data sample
        # just wait for at least one -- see consume_data above
        log.info("waiting for reception of a data sample...")
        self._async_data_result.get(timeout=DATA_TIMEOUT)
        self.assertTrue(len(self._samples_received) >= 1)

        log.info("waiting a bit more for reception of more data samples...")
        sleep(15)
        log.info("Got data samples: %d", len(self._samples_received))


        # wait for event
        # just wait for at least one event -- see consume_event above
        log.info("waiting for reception of an event...")
        self._async_event_result.get(timeout=EVENT_TIMEOUT)
        log.info("Received events: %s", len(self._events_received))

        # get the extended platform, which will include platform aggregate status fields
        extended_platform = self.imsclient.get_platform_device_extension(self.device_id)
        #        log.debug('test_single_platform   extended_platform: %s', str(extended_platform))
        #        log.debug('test_single_platform   power_status_roll_up: %s', str(extended_platform.computed.power_status_roll_up.value))
        #        log.debug('test_single_platform   comms_status_roll_up: %s', str(extended_platform.computed.communications_status_roll_up.value))

        # STOP_MONITORING:
        cmd = AgentCommand(command=PlatformAgentEvent.STOP_MONITORING)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform STOP_MONITORING = %s', str(retval))

        # GO_INACTIVE
        cmd = AgentCommand(command=PlatformAgentEvent.GO_INACTIVE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform GO_INACTIVE = %s', str(retval))

        # RESET: Resets the base platform agent, which includes termination of
        # its sub-platform processes:
        cmd = AgentCommand(command=PlatformAgentEvent.RESET)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform RESET = %s', str(retval))

        #-------------------------------
        # Stop Base Platform AgentInstance
        #-------------------------------
        self.imsclient.stop_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
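
# Hedged sketch (added; not from the original tests): the command sequence that
# _create_launch_verify drives step by step can be expressed as one loop. The
# helper name drive_platform_lifecycle is hypothetical; AgentCommand,
# PlatformAgentEvent, TIMEOUT and log come from the surrounding module.
def drive_platform_lifecycle(pa_client):
    """Walk a platform agent through its full command cycle and back."""
    for event in (PlatformAgentEvent.INITIALIZE,
                  PlatformAgentEvent.GO_ACTIVE,
                  PlatformAgentEvent.RUN,
                  PlatformAgentEvent.START_MONITORING,
                  PlatformAgentEvent.STOP_MONITORING,
                  PlatformAgentEvent.GO_INACTIVE,
                  PlatformAgentEvent.RESET):
        cmd = AgentCommand(command=event)
        retval = pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('%s = %s', event, str(retval))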