class TestContainerExchangeToEms(IonIntegrationTestCase):
    # these tests should automatically contact the EMS to do the work
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.ems = ExchangeManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()

        # we want the ex manager to do its thing, but without actual calls to the broker;
        # just mock out the transport
        self.container.ex_manager._priviledged_transport = Mock(BaseTransport)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_create_xs_talks_to_ems(self):
        self.patch_cfg('pyon.ion.exchange.CFG',
                       container=DotDict(CFG.container,
                                         exchange=DotDict(auto_register=True)))

        xs = self.container.ex_manager.create_xs('house')
        self.addCleanup(xs.delete)

        # should have called EMS and set RR items
        res, _ = self.rr.find_resources(RT.ExchangeSpace, name='house')
        self.assertEquals(res[0].name, 'house')

        # should have tried to call broker as well
        self.assertEquals(
            self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_count, 1)
        self.assertIn(
            'house', self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_args[0][0])

    @patch.dict('pyon.ion.exchange.CFG',
                container=DotDict(CFG.container,
                                  exchange=DotDict(auto_register=False)))
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_create_xs_with_no_flag_only_uses_ex_manager(self):
        self.patch_cfg('pyon.ion.exchange.CFG',
                       container=DotDict(
                           CFG.container,
                           exchange=DotDict(auto_register=False)))

        xs = self.container.ex_manager.create_xs('house')
        self.addCleanup(xs.delete)

        e1, e2 = self.rr.find_resources(RT.ExchangeSpace, name='house')
        self.assertEquals(e1, [])
        self.assertEquals(e2, [])
        self.assertEquals(
            self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_count, 1)
        self.assertIn(
            'house', self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_args[0][0])
Example #2
def helper_create_highcharts_workflow_def(container):

    from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
    rrclient = ResourceRegistryServiceClient(node=container.node)

    # Check to see if the workflow definition already exists
    workflow_def_ids,_ = rrclient.find_resources(restype=RT.WorkflowDefinition, name='Realtime_HighCharts', id_only=True)

    if len(workflow_def_ids) > 0:
        workflow_def_id = workflow_def_ids[0]
    else:
        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Realtime_HighCharts',description='Convert stream data to HighCharts data')

        #Add a transformation process definition
        procdef_id = helper_create_highcharts_data_process_definition(container)
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        from interface.services.ans.iworkflow_management_service import WorkflowManagementServiceClient
        workflowclient = WorkflowManagementServiceClient(node=container.node)

        workflow_def_id = workflowclient.create_workflow_definition(workflow_def_obj)

    return workflow_def_id
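Because the registry lookup short-circuits creation, the helper is idempotent; a usage sketch, assuming container is a started pyon container:

workflow_def_id = helper_create_highcharts_workflow_def(container)
# a second call finds the existing definition instead of creating another one
assert workflow_def_id == helper_create_highcharts_workflow_def(container)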
Example #3
    def is_service_available(self, service_name, local_rr_only=False):

        try:
            service_resource = None
            #from pyon.core.bootstrap import container_instance
            from mi.core.bootstrap import container_instance
            from interface.objects import ServiceStateEnum
            # Use container direct RR connection if available, otherwise use messaging to the RR service
            if hasattr(container_instance, 'has_capability') and container_instance.has_capability('RESOURCE_REGISTRY'):
                service_resource, _ = container_instance.resource_registry.find_resources(restype='Service', name=service_name)
            elif not local_rr_only:
                from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
                rr_client = ResourceRegistryServiceClient(container_instance.node)
                service_resource, _ = rr_client.find_resources(restype='Service', name=service_name)
            else:
                log.warn("is_service_available(%s) - No RR connection" % service_name)

            # The service is available only if there is a single RR object for it and it is in one of these states:
            if service_resource and len(service_resource) > 1:
                log.warn("is_service_available(%s) - Found multiple service instances: %s", service_name, service_resource)

            # MM 2013-08-17: Added PENDING, because this means service will be there shortly
            if service_resource and service_resource[0].state in (ServiceStateEnum.READY, ServiceStateEnum.STEADY, ServiceStateEnum.PENDING):
                return True
            elif service_resource:
                log.warn("is_service_available(%s) - Service resource in invalid state", service_resource)

            return False

        except Exception as ex:
            return False
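A hypothetical caller sketch built on the method above: poll until the service registers, since registration can lag container startup (names and timings here are illustrative, not from the source).

import time

def wait_for_service(client, service_name, tries=50, delay=0.2):
    # client is assumed to expose is_service_available as defined above
    for _ in range(tries):
        if client.is_service_available(service_name):
            return True
        time.sleep(delay)
    return False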
Example #4
File: service.py  Project: pkediyal/pyon
    def is_service_available(self, service_name, local_rr_only=False):

        try:
            service_resource = None
            from pyon.core.bootstrap import container_instance
            from interface.objects import ServiceStateEnum
            #Use container direct RR connection if available, otherwise use messaging to the RR service
            if hasattr(container_instance,
                       'has_capability') and container_instance.has_capability(
                           'RESOURCE_REGISTRY'):
                service_resource, _ = container_instance.resource_registry.find_resources(
                    restype='Service', name=service_name)
            else:
                if not local_rr_only:
                    from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
                    rr_client = ResourceRegistryServiceClient(
                        container_instance.node)
                    service_resource, _ = rr_client.find_resources(
                        restype='Service', name=service_name)

            #The service is available only if there is a single RR object for it and it is in one of these states:
            if service_resource and (
                    service_resource[0].state == ServiceStateEnum.READY
                    or service_resource[0].state == ServiceStateEnum.STEADY):
                return True

            return False

        except Exception as e:
            return False
Example #5
    def is_service_available(self, service_name, local_rr_only=False):

        try:
            service_resource = None
            from pyon.core.bootstrap import container_instance
            from interface.objects import ServiceStateEnum
            #Use container direct RR connection if available, otherwise use messaging to the RR service
            if hasattr(container_instance, 'has_capability') and container_instance.has_capability('RESOURCE_REGISTRY'):
                service_resource, _ = container_instance.resource_registry.find_resources(restype='Service', name=service_name)
            else:
                if not local_rr_only:
                    from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
                    rr_client = ResourceRegistryServiceClient(container_instance.node)
                    service_resource, _ = rr_client.find_resources(restype='Service', name=service_name)

            #The service is available only if there is a single RR object for it and it is in one of these states:
            if service_resource and len(service_resource) > 1:
                log.warn("Found multiple service instances registered under name %s: %s", service_name, service_resource)

            if service_resource and ( service_resource[0].state == ServiceStateEnum.READY or service_resource[0].state == ServiceStateEnum.STEADY ):
                return True
            elif service_resource:
                log.warn("Call to is_service_available() failed although a Service resource exists: %s", service_resource)

            return False

        except Exception as e:
            return False
Example #7
File: consumer.py  Project: ooici/ape
class DataProductConsumer(ApeComponent):
    def __init__(self, name, agent, configuration):
        unique_string = name + "_" + str(unique())
        self.process_name = 'ape_consumer_' + unique_string
        self.easy_registration = True
        self._transform_pid = None
        super(DataProductConsumer,self).__init__(name, agent, configuration)

    def _get_process_definition_id(self):
        if self.easy_registration:
            try:
                return self._read_process_definition_id()
            except:
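                # find_resources returned no matches, so indexing raised;
                # fall back to registering a fresh definition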
                return self.register_process_definition()
        else:
            return self._read_process_definition_id()


    def _read_process_definition_id(self):
        process_ids,_ = self.resource_registry.find_resources(restype=RT.ProcessDefinition, lcstate=None, name=self.process_name, id_only=True)
        return process_ids[0]

    def _start(self):
        log.debug('consumer starting')
        self.resource_registry = ResourceRegistryServiceClient(node=self.agent.container.node)

    def enable_transform(self):
        product_ids,_ = self.resource_registry.find_resources(RT.DataProduct, None, self.configuration.data_product, id_only=True)
        if len(product_ids)>0:
            log.info('have registry entry, using id: %s', product_ids[0])
            dp = product_ids[0]
        else:
            # config = {'process':{'type':IMMEDIATE_PROCESS_TYPE, 'procinner': 'level2'}, 'base': 'level1'}
            dp = self.configuration.data_product
        process_config = {'process':{'type':STREAM_PROCESS_TYPE, 'queue_name': dp }}
        log.info('starting consumer on exchange: ' + dp)
        self._transform_pid = self.agent.container.proc_manager.spawn_process('ape_consumer_'+dp, 'ape.component.consumer', '_NoOpTransform', process_config)

    def disable_transform(self):
        pass

    def _stop(self):
        pass

    def perform_action(self, request):
        if isinstance(request, StartRequest):
            self.enable_transform()
        elif isinstance(request, StopRequest):
            self.disable_transform()
Example #8
    def is_service_available(self, service_name, local_rr_only=False):

        try:
            service_resource = None
            #from pyon.core.bootstrap import container_instance
            from mi.core.bootstrap import container_instance
            from interface.objects import ServiceStateEnum
            # Use container direct RR connection if available, otherwise use messaging to the RR service
            if hasattr(container_instance,
                       'has_capability') and container_instance.has_capability(
                           'RESOURCE_REGISTRY'):
                service_resource, _ = container_instance.resource_registry.find_resources(
                    restype='Service', name=service_name)
            elif not local_rr_only:
                from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
                rr_client = ResourceRegistryServiceClient(
                    container_instance.node)
                service_resource, _ = rr_client.find_resources(
                    restype='Service', name=service_name)
            else:
                log.warn("is_service_available(%s) - No RR connection" %
                         service_name)

            # The service is available only if there is a single RR object for it and it is in one of these states:
            if service_resource and len(service_resource) > 1:
                log.warn(
                    "is_service_available(%s) - Found multiple service instances: %s",
                    service_name, service_resource)

            # MM 2013-08-17: Added PENDING, because this means service will be there shortly
            if service_resource and service_resource[0].state in (
                    ServiceStateEnum.READY, ServiceStateEnum.STEADY,
                    ServiceStateEnum.PENDING):
                return True
            elif service_resource:
                log.warn(
                    "is_service_available(%s) - Service resource in invalid state",
                    service_resource)

            return False

        except Exception as ex:
            return False
Example #9
def helper_create_highcharts_data_process_definition(container):

    from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
    rrclient = ResourceRegistryServiceClient(node=container.node)

    #First look to see if it exists and if not, then create it
    dpd,_ = rrclient.find_resources(restype=RT.DataProcessDefinition, name='highcharts_transform')
    if len(dpd) > 0:
        return dpd[0]

    # Data Process Definition
    log.debug("Create data process definition for highcharts transform")
    dpd_obj = IonObject(RT.DataProcessDefinition,
        name='highcharts_transform',
        description='Convert data streams to Highcharts data',
        module='ion.processes.data.transforms.viz.highcharts',
        class_name='VizTransformHighCharts')

    from interface.services.sa.idata_process_management_service import DataProcessManagementServiceClient
    dataprocessclient = DataProcessManagementServiceClient(node=container.node)

    procdef_id = dataprocessclient.create_data_process_definition(dpd_obj)

    from interface.services.dm.idataset_management_service import DatasetManagementServiceClient
    datasetclient = DatasetManagementServiceClient(node=container.node)

    pdict_id = datasetclient.read_parameter_dictionary_by_name('highcharts', id_only=True)

    from interface.services.dm.ipubsub_management_service import PubsubManagementServiceClient
    pubsubclient = PubsubManagementServiceClient(node=container.node)

    # create a stream definition for the data from the highcharts transform
    stream_def_id = pubsubclient.create_stream_definition(name='VizTransformHighCharts', parameter_dictionary_id=pdict_id)
    dataprocessclient.assign_stream_definition_to_data_process_definition(stream_def_id, procdef_id, binding='highcharts' )

    return procdef_id
class TestTransformWorker(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # Instantiate a process to represent the test
        process=TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process = process)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)

        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)

    def push_granule(self, data_product_id):
        '''
        Publishes and monitors that the granule arrived
        '''
        datasets, _ = self.rrclient.find_objects(data_product_id, PRED.hasDataset, id_only=True)
        dataset_monitor = DatasetMonitor(datasets[0])

        rdt = self.ph.rdt_for_data_product(data_product_id)
        self.ph.fill_parsed_rdt(rdt)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)

        assert dataset_monitor.wait()
        dataset_monitor.stop()



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.dp_list = []
        self.data_process_objs = []
        self._output_stream_ids = []
        self.granule_verified = Event()
        self.worker_assigned_event_verified = Event()
        self.dp_created_event_verified = Event()
        self.heartbeat_event_verified = Event()

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
                                             temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        self.start_event_listener()

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.dp_list.append(dataprocess_id)

        # validate that the repository for data product algorithms persists the new resources  NEW SA-1
        # create_data_process call created one of each
        dpd_ids, _ = self.rrclient.find_resources(restype=OT.DataProcessDefinition, id_only=False)
        # there will be more than one because of the DPDs that represent the PFs in the data product above
        self.assertTrue(dpd_ids is not None)
        dp_ids, _ = self.rrclient.find_resources(restype=OT.DataProcess, id_only=False)
        # only one DP because the PFs in the data product above are not activated yet.
        self.assertEquals(len(dp_ids), 1)


        # validate the name and version label  NEW SA - 2
        dataprocessdef_obj = self.dataprocessclient.read_data_process_definition(dataprocessdef_id)
        self.assertEqual(dataprocessdef_obj.version_label, '1.0a')
        self.assertEqual(dataprocessdef_obj.name, 'add_arrays')

        # validate that the DPD has an attachment  NEW SA - 21
        attachment_ids, assoc_ids = self.rrclient.find_objects(dataprocessdef_id, PRED.hasAttachment, RT.Attachment, True)
        self.assertEqual(len(attachment_ids), 1)
        attachment_obj = self.rrclient.read_attachment(attachment_ids[0])
        log.debug('attachment: %s', attachment_obj)

        # validate that the data process resource has input and output data products associated
        # L4-CI-SA-RQ-364  and NEW SA-3
        outproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasOutputProduct, RT.DataProduct, True)
        self.assertEqual(len(outproduct_ids), 1)
        inproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasInputProduct, RT.DataProduct, True)
        self.assertEqual(len(inproduct_ids), 1)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check that there are 2 entries in the provenance graph: the parent and the child;
        # the DataProcessDefinition that created the child is recorded on the parent link.
        self.assertTrue(len(output_data_product_provenance) == 2)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(output_data_product_provenance[output_data_product_id[0]]['parents'][self.input_dp_id]['data_process_definition_id'] == dataprocessdef_id)


        # NEW SA - 4 | Data processing shall include the appropriate data product algorithm name and version number in
        # the metadata of each output data product created by the data product algorithm.
        output_data_product_obj,_ = self.rrclient.find_objects(subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=False)
        self.assertTrue(output_data_product_obj[0].name != None)
        self.assertTrue(output_data_product_obj[0]._rev != None)

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_transform_worker subscription_obj:  %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route )


        for n in range(1, 101):
            rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
            rdt['time']         = [0] # time should always come first
            rdt['conductivity'] = [1]
            rdt['pressure']     = [2]
            rdt['salinity']     = [8]

            self.publisher.publish(rdt.to_granule())

        # validate that the output granule is received and the updated value is correct
        self.assertTrue(self.granule_verified.wait(self.wait_time))


        # validate that the data process loaded into worker event is received    (L4-CI-SA-RQ-182)
        self.assertTrue(self.worker_assigned_event_verified.wait(self.wait_time))

        # validate that the data process create (with data product ids) event is received    (NEW SA -42)
        self.assertTrue(self.dp_created_event_verified.wait(self.wait_time))

        # validate that the data process heartbeat event is received (for every hundred granules processed) (L4-CI-SA-RQ-182)
        #this takes a while so set wait limit to large value
        self.assertTrue(self.heartbeat_event_verified.wait(200))

        # validate that the code from the transform function can be retrieved via inspect_data_process_definition
        src = self.dataprocessclient.inspect_data_process_definition(dataprocessdef_id)
        self.assertIn( 'def add_arrays(a, b)', src)

        # now delete the DPD and DP, then verify that the resources are retired so that information required for provenance is still available
        self.dataprocessclient.delete_data_process(dataprocess_id)
        self.dataprocessclient.delete_data_process_definition(dataprocessdef_id)

        in_dp_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasInputProduct, object_type=RT.DataProduct, id_only=True)
        self.assertTrue(in_dp_objs is not None)

        dpd_objs, _ = self.rrclient.find_subjects(subject_type=RT.DataProcessDefinition, predicate=PRED.hasDataProcess, object=dataprocess_id, id_only=True)
        self.assertTrue(dpd_objs is not None)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_instrumentdevice(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
            temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes.
        instDevice_obj,_ = self.rrclient.find_resources(restype=RT.InstrumentDevice, name='test_ctd_device')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice, name='test_ctd_device', description="test_ctd_device", serial_number="12345" )
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()

        self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check that there are 3 entries in the provenance graph: the parent, the child, and
        # the instrument device that feeds the parent.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(instDevice_id in output_data_product_provenance[self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[instDevice_id]['type'] == 'InstrumentDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_platformdevice(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
            temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes.
        platform_device_obj,_ = self.rrclient.find_resources(restype=RT.PlatformDevice, name='TestPlatform')
        if platform_device_obj:
            platform_device_id = platform_device_obj[0]._id
        else:
            platform_device_obj = IonObject(RT.PlatformDevice, name='TestPlatform', description="TestPlatform", serial_number="12345" )
            platform_device_id = self.imsclient.create_platform_device(platform_device=platform_device_obj)

        self.damsclient.assign_data_product(input_resource_id=platform_device_id, data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check that there are 3 entries in the provenance graph: the parent, the child, and
        # the platform device that feeds the parent.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(platform_device_id in output_data_product_provenance[self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[platform_device_id]['type'] == 'PlatformDevice')


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_event_transform_worker(self):
        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()


        # test that a data process (type: data-product-in / event-out) can be defined and launched.
        # verify that event fields are correctly populated


        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
                                             temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        # create the DPD and two DPs
        self.event_data_process_id = self.create_event_data_processes()

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=self.event_data_process_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_event_transform_worker subscription_obj:  %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route )

        self.start_event_transform_listener()

        self.data_modified = Event()

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time']         = [0] # time should always come first
        rdt['conductivity'] = [1]
        rdt['pressure']     = [2]
        rdt['salinity']     = [8]

        self.publisher.publish(rdt.to_granule())

        self.assertTrue(self.event_verified.wait(self.wait_time))



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_bad_argument_map(self):
        self._output_stream_ids = []

        # test that a data process (type: data-product-in / data-product-out) parameter mapping is validated during
        # data process creation and that the correct exception is raised for both input and output.

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
                                             temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # two data processes using one transform and one DPD

        dp1_func_output_dp_id =  self.create_output_data_product()


        # Set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
            uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
            )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS
            )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func' )

        # create the data process with invalid argument map
        argument_map = {"arr1": "foo", "arr2": "bar"}
        output_param = "salinity"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                                                                                 outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(ex.message, "Input data product does not contain the parameters defined in argument map")

        # create the data process with invalid output parameter name
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "foo"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                                                                                 outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(ex.message, "Output data product does not contain the output parameter name provided")


    def create_event_data_processes(self):

        # two data processes using one transform and one DPD
        argument_map= {"a": "salinity"}


        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
            name='validate_salinity_array',
            description='validate_salinity_array',
            function='validate_salinity_array',
            module="ion.processes.data.transforms.test.test_transform_worker",
            arguments=['a'],
            function_type=TransformFunctionType.TRANSFORM
            )

        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='validate_salinity_array',
            description='validate_salinity_array',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
            )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='validate_salinity_array' )

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                                                                             outputs=None, argument_map=argument_map)
        self.damsclient.register_process(dp1_data_process_id)
        self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        return dp1_data_process_id

    def create_data_process(self):

        # two data processes using one transform and one DPD

        dp1_func_output_dp_id =  self.create_output_data_product()
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "salinity"


        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
            uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
            )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
            version_label='1.0a'
            )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func' )

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                                                                             outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)
        self.damsclient.register_process(dp1_data_process_id)
        #self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        # add an attachment object to this DPD to test new SA-21
        import msgpack
        attachment_content = 'foo bar'
        attachment_obj = IonObject( RT.Attachment,
                                name='test_attachment',
                                attachment_type=AttachmentType.ASCII,
                                content_type='text/plain',
                                content=msgpack.packb(attachment_content))
        att_id = self.rrclient.create_attachment(add_array_dpd_id, attachment_obj)
        self.addCleanup(self.rrclient.delete_attachment, att_id)

        return add_array_dpd_id, dp1_data_process_id, dp1_func_output_dp_id


    def create_output_data_product(self):
        dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition(name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id)

        dp1_output_dp_obj = IonObject(  RT.DataProduct,
            name='data_process1_data_product',
            description='output of add array func',
            temporal_domain = self.time_dom.dump(),
            spatial_domain = self.spatial_dom.dump())

        dp1_func_output_dp_id = self.dataproductclient.create_data_product(dp1_output_dp_obj,  dp1_outgoing_stream_id)
        self.addCleanup(self.dataproductclient.delete_data_product, dp1_func_output_dp_id)
        # retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id, PRED.hasStream, None, True)
        self._output_stream_ids.append(stream_ids[0])

        subscription_id = self.pubsub_client.create_subscription('validator', data_product_ids=[dp1_func_output_dp_id])
        self.addCleanup(self.pubsub_client.delete_subscription, subscription_id)

        def on_granule(msg, route, stream_id):
            log.debug('recv_packet stream_id: %s route: %s   msg: %s', stream_id, route, msg)
            self.validate_output_granule(msg, route, stream_id)
            self.granule_verified.set()

        validator = StandaloneStreamSubscriber('validator', callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_client.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, subscription_id)

        return dp1_func_output_dp_id


    def validate_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DataProcessStatusEvent.
        """
        data_process_event = args[0]
        log.debug("DataProcessStatusEvent: %s" ,  str(data_process_event.__dict__))

        # if data process already created, check origin
        if self.dp_list:
            self.assertIn( data_process_event.origin, self.dp_list)

            # if this is a heartbeat event then 100 granules have been processed
            if 'data process status update.' in data_process_event.description:
                self.heartbeat_event_verified.set()

        else:
            # else check that this is the assign event

            if 'Data process assigned to transform worker' in data_process_event.description:
                self.worker_assigned_event_verified.set()
            elif 'Data process created for data product' in data_process_event.description:
                self.dp_created_event_verified.set()


    def validate_output_granule(self, msg, route, stream_id):
        self.assertIn( stream_id, self._output_stream_ids)

        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.debug('validate_output_granule  rdt: %s', rdt)
        sal_val = rdt['salinity']
        np.testing.assert_array_equal(sal_val, np.array([3]))

    def start_event_listener(self):

        es = EventSubscriber(event_type=OT.DataProcessStatusEvent, callback=self.validate_event)
        es.start()

        self.addCleanup(es.stop)

    def validate_transform_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DataProcessStatusEvent.
        """
        status_alert_event = args[0]

        np.testing.assert_array_equal(status_alert_event.origin, self.stream_id )
        np.testing.assert_array_equal(status_alert_event.values, np.array([self.event_data_process_id]))
        log.debug("DeviceStatusAlertEvent: %s" ,  str(status_alert_event.__dict__))
        self.event_verified.set()


    def start_event_transform_listener(self):
        es = EventSubscriber(event_type=OT.DeviceStatusAlertEvent, callback=self.validate_transform_event)
        es.start()

        self.addCleanup(es.stop)


    def test_download(self):
        egg_url = 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        egg_path = TransformWorker.download_egg(egg_url)

        import pkg_resources
        pkg_resources.working_set.add_entry(egg_path)

        from ion_example.add_arrays import add_arrays

        a = add_arrays(1,2)
        self.assertEquals(a,3)
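test_download leans on a general setuptools pattern for making a downloaded egg importable at runtime; a standalone sketch of that pattern (the path below is hypothetical):

import pkg_resources

egg_path = '/tmp/ion_example-0.1-py2.7.egg'    # hypothetical local path
pkg_resources.working_set.add_entry(egg_path)  # prepend the egg to the import path
# modules inside the egg now resolve normally, e.g.:
#   from ion_example.add_arrays import add_arrays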
class NotificationWorker(SimpleProcess):
    """
    Instances of this class act as Notification Workers.
    """

    def on_init(self):
        self.event_pub = EventPublisher()
        self.user_info = {}
        self.resource_registry = ResourceRegistryServiceClient()

    def test_hook(self, user_info, reverse_user_info):
        """
        This method exists only to facilitate the testing of the reload of the user_info dictionary
        """
        pass

    def on_start(self):
        super(NotificationWorker, self).on_start()

        self.smtp_client = setting_up_smtp_client()

        # ------------------------------------------------------------------------------------
        # Start by loading the user info and reverse user info dictionaries
        # ------------------------------------------------------------------------------------

        try:
            self.user_info = self.load_user_info()
            self.reverse_user_info = calculate_reverse_user_info(self.user_info)

            log.info("On start up, notification workers loaded the following user_info dictionary: %s" % self.user_info)
            log.info("The calculated reverse user info: %s" % self.reverse_user_info)

        except NotFound as exc:
            if exc.message.find("users_index") > -1:
                log.warning("Notification workers found on start up that users_index have not been loaded yet.")
            else:
                raise NotFound(exc.message)

        # ------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        # ------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            """
            Callback method for the subscriber to ReloadUserInfoEvent
            """

            notification_id = event_msg.notification_id
            log.info(
                "Notification worker received a ReloadNotificationEvent. The relevant notification_id is %s"
                % notification_id
            )

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info = calculate_reverse_user_info(self.user_info)
            self.test_hook(self.user_info, self.reverse_user_info)

            log.debug("After a reload, the user_info: %s" % self.user_info)
            log.debug("The recalculated reverse_user_info: %s" % self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(event_type="ReloadUserInfoEvent", callback=reload_user_info)
        self.reload_user_info_subscriber.start()

        # ------------------------------------------------------------------------------------
        # Create an event subscriber for all events that are of interest for notifications
        # ------------------------------------------------------------------------------------

        self.event_subscriber = EventSubscriber(queue_name="uns_queue", callback=self.process_event)
        self.event_subscriber.start()

    def process_event(self, msg, headers):
        """
        Callback method for the subscriber listening for all events
        """
        # ------------------------------------------------------------------------------------
        # From the reverse user info dict find out which users have subscribed to that event
        # ------------------------------------------------------------------------------------

        users = []
        if self.reverse_user_info:
            users = check_user_notification_interest(event=msg, reverse_user_info=self.reverse_user_info)

        log.info("Type of event received by notification worker: %s" % msg.type_)
        log.info("Notification worker deduced the following users were interested in the event: %s" % users)

        # ------------------------------------------------------------------------------------
        # Send email to the users
        # ------------------------------------------------------------------------------------

        for user_name in users:
            msg_recipient = self.user_info[user_name]["user_contact"].email
            send_email(message=msg, msg_recipient=msg_recipient, smtp_client=self.smtp_client)

    def on_stop(self):
        # close subscribers safely
        self.event_subscriber.stop()
        self.reload_user_info_subscriber.stop()

    def on_quit(self):
        # close subscribers safely
        self.event_subscriber.stop()
        self.reload_user_info_subscriber.stop()

    def poll(self, tries, callback, *args, **kwargs):
        """
        Polling wrapper for queries
        Elasticsearch may not index and cache the changes right away, so we may need
        a couple of tries and a little time before the results show.
        """
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None
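    # Usage sketch (hypothetical, not from the source): retry a query until it yields hits, e.g.
    #   users = self.poll(10, lambda: self.resource_registry.find_resources(restype=RT.UserInfo)[0])
    # poll returns the first truthy result, or None once the tries are exhausted.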

    def load_user_info(self):
        """
        Method to load the user info dictionary used by the notification workers and the UNS

        @retval user_info dict
        """

        users, _ = self.resource_registry.find_resources(restype=RT.UserInfo)

        user_info = {}

        if not users:
            return {}

        for user in users:
            notifications = []
            for variable in user.variables:
                if variable["name"] == "notifications":
                    notifications = variable["value"]

            user_info[user.name] = {"user_contact": user.contact, "notifications": notifications}

        return user_info
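For reference, a sketch of the dictionary shape load_user_info builds; the values here are fakes, standing in for the contact object and notification list pulled from each UserInfo resource.

class FakeContact(object):
    email = 'jdoe@example.com'

user_info = {
    'jdoe': {
        'user_contact': FakeContact(),   # process_event reads .email from this
        'notifications': [],             # NotificationRequest objects in practice
    },
}
assert user_info['jdoe']['user_contact'].email == 'jdoe@example.com'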
class TestDMEnd2End(IonIntegrationTestCase):
    def setUp(self): # Love the non pep-8 convention
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.process_dispatcher   = ProcessDispatcherServiceClient()
        self.pubsub_management    = PubsubManagementServiceClient()
        self.resource_registry    = ResourceRegistryServiceClient()
        self.dataset_management   = DatasetManagementServiceClient()
        self.ingestion_management = IngestionManagementServiceClient()
        self.data_retriever       = DataRetrieverServiceClient()
        self.pids                 = []
        self.event                = Event()
        self.exchange_space_name  = 'test_granules'
        self.exchange_point_name  = 'science_data'       
        self.i                    = 0

        self.purge_queues()
        self.queue_buffer         = []
        self.streams = []
        self.addCleanup(self.stop_all_ingestion)

    def purge_queues(self):
        xn = self.container.ex_manager.create_xn_queue('science_granule_ingestion')
        xn.purge()
        

    def tearDown(self):
        self.purge_queues()
        for pid in self.pids:
            self.container.proc_manager.terminate_process(pid)
        IngestionManagementIntTest.clean_subscriptions()
        for queue in self.queue_buffer:
            if isinstance(queue, ExchangeNameQueue):
                queue.delete()
            elif isinstance(queue, str):
                xn = self.container.ex_manager.create_xn_queue(queue)
                xn.delete()

    #--------------------------------------------------------------------------------
    # Helper/Utility methods
    #--------------------------------------------------------------------------------
        
    def create_dataset(self, parameter_dict_id=''):
        '''
        Creates a time-series dataset
        '''
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
        if not parameter_dict_id:
            parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)

        dataset_id = self.dataset_management.create_dataset('test_dataset_%i'%self.i, parameter_dictionary_id=parameter_dict_id, spatial_domain=sdom, temporal_domain=tdom)
        return dataset_id
    
    def get_datastore(self, dataset_id):
        '''
        Gets an instance of the datastore.
            This method is primarily used to work around a bug where integration tests running in
            multiple containers may delete a CouchDB datastore while the other containers remain
            unaware of the datastore's new state.
        '''
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore
    
    def get_ingestion_config(self):
        '''
        Grab the ingestion configuration from the resource registry
        '''
        # The ingestion configuration should have been created by the bootstrap service 
        # which is configured through r2deploy.yml

        ingest_configs, _  = self.resource_registry.find_resources(restype=RT.IngestionConfiguration,id_only=True)
        return ingest_configs[0]

    def launch_producer(self, stream_id=''):
        '''
        Launch the producer
        '''

        pid = self.container.spawn_process('better_data_producer', 'ion.processes.data.example_data_producer', 'BetterDataProducer', {'process':{'stream_id':stream_id}})

        self.pids.append(pid)

    def make_simple_dataset(self):
        '''
        Makes a stream, a stream definition and a dataset, the essentials for most of these tests
        '''
        pdict_id             = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        stream_def_id        = self.pubsub_management.create_stream_definition('ctd data', parameter_dictionary_id=pdict_id)
        stream_id, route     = self.pubsub_management.create_stream('ctd stream %i' % self.i, 'xp1', stream_definition_id=stream_def_id)

        dataset_id = self.create_dataset(pdict_id)

        self.get_datastore(dataset_id)
        self.i += 1
        return stream_id, route, stream_def_id, dataset_id

    def publish_hifi(self,stream_id,stream_route,offset=0):
        '''
        Publish deterministic data
        '''

        pub = StandaloneStreamPublisher(stream_id, stream_route)

        stream_def = self.pubsub_management.read_stream_definition(stream_id=stream_id)
        stream_def_id = stream_def._id
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10) + (offset * 10)
        rdt['temp'] = np.arange(10) + (offset * 10)
        pub.publish(rdt.to_granule())

    def publish_fake_data(self,stream_id, route):
        '''
        Make four granules
        '''
        for i in xrange(4):
            self.publish_hifi(stream_id,route,i)
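
    # Taken together, the two helpers above are deterministic: offset k yields
    # time/temp values 10*k .. 10*k+9, so publish_fake_data() covers 0..39 and
    # later assertions can compare directly against np.arange(...).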

    def start_ingestion(self, stream_id, dataset_id):
        '''
        Starts ingestion/persistence for a given dataset
        '''
        ingest_config_id = self.get_ingestion_config()
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingest_config_id, dataset_id=dataset_id)
    
    def stop_ingestion(self, stream_id):
        ingest_config_id = self.get_ingestion_config()
        self.ingestion_management.unpersist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingest_config_id)
        
    def stop_all_ingestion(self):
        try:
            [self.stop_ingestion(sid) for sid in self.streams]
        except:
            pass

    def validate_granule_subscription(self, msg, route, stream_id):
        '''
        Validation for granule format
        '''
        if msg == {}:
            return
        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.info('%s', rdt.pretty_print())
        self.assertIsInstance(msg,Granule,'Message is improperly formatted. (%s)' % type(msg))
        self.event.set()

    def wait_until_we_have_enough_granules(self, dataset_id='',data_size=40):
        '''
        Loops until there is a sufficient amount of data in the dataset
        '''
        done = False
        with gevent.Timeout(40):
            while not done:
                extents = self.dataset_management.dataset_extents(dataset_id, 'time')[0]
                granule = self.data_retriever.retrieve_last_data_points(dataset_id, 1)
                rdt     = RecordDictionaryTool.load_from_granule(granule)
                if rdt['time'] and rdt['time'][0] != rdt._pdict.get_context('time').fill_value and extents >= data_size:
                    done = True
                else:
                    gevent.sleep(0.2)




    #--------------------------------------------------------------------------------
    # Test Methods
    #--------------------------------------------------------------------------------

    @attr('SMOKE') 
    def test_dm_end_2_end(self):
        #--------------------------------------------------------------------------------
        # Set up a stream and have a mock instrument (producer) send data
        #--------------------------------------------------------------------------------
        self.event.clear()

        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary',  param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context('binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context('records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary('replay_pdict', parameter_context_ids=context_ids, temporal_context='time')
        
        stream_definition = self.pubsub_management.create_stream_definition('ctd data', parameter_dictionary_id=pdict_id)


        stream_id, route = self.pubsub_management.create_stream('producer', exchange_point=self.exchange_point_name, stream_definition_id=stream_definition)




        #--------------------------------------------------------------------------------
        # Start persisting the data on the stream
        # - Get the ingestion configuration from the resource registry
        # - Create the dataset
        # - Call persist_data_stream to set up the subscription for the ingestion workers
        #   on the specified stream, which causes the data to be persisted
        #--------------------------------------------------------------------------------

        ingest_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingest_config_id, dataset_id=dataset_id)

        #--------------------------------------------------------------------------------
        # Now the granules are being ingested and persisted
        #--------------------------------------------------------------------------------

        self.launch_producer(stream_id)
        self.wait_until_we_have_enough_granules(dataset_id,40)
        
        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to retrieve
        #--------------------------------------------------------------------------------
        
        replay_data = self.data_retriever.retrieve(dataset_id)
        self.assertIsInstance(replay_data, Granule)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        self.assertTrue((rdt['time'][:10] == np.arange(10)).all(),'%s' % rdt['time'][:])
        self.assertTrue((rdt['binary'][:10] == np.array(['hi']*10, dtype='object')).all())

        
        #--------------------------------------------------------------------------------
        # Now to try the streamed approach
        #--------------------------------------------------------------------------------
        replay_stream_id, replay_route = self.pubsub_management.create_stream('replay_out', exchange_point=self.exchange_point_name, stream_definition_id=stream_definition)
        self.replay_id, process_id =  self.data_retriever.define_replay(dataset_id=dataset_id, stream_id=replay_stream_id)
        log.info('Process ID: %s', process_id)

        replay_client = ReplayClient(process_id)

    
        #--------------------------------------------------------------------------------
        # Create the listening endpoint for the retriever to talk to
        #--------------------------------------------------------------------------------
        xp = self.container.ex_manager.create_xp(self.exchange_point_name)
        subscriber = StandaloneStreamSubscriber(self.exchange_space_name, self.validate_granule_subscription)
        self.queue_buffer.append(self.exchange_space_name)
        subscriber.start()
        subscriber.xn.bind(replay_route.routing_key, xp)

        self.data_retriever.start_replay_agent(self.replay_id)

        self.assertTrue(replay_client.await_agent_ready(5), 'The process never launched')
        replay_client.start_replay()
        
        self.assertTrue(self.event.wait(10))
        subscriber.stop()

        self.data_retriever.cancel_replay_agent(self.replay_id)


        #--------------------------------------------------------------------------------
        # Test the slicing capabilities
        #--------------------------------------------------------------------------------

        granule = self.data_retriever.retrieve(dataset_id=dataset_id, query={'tdoa':slice(0,5)})
        rdt = RecordDictionaryTool.load_from_granule(granule)
        b = rdt['time'] == np.arange(5)
        self.assertTrue(b.all() if not isinstance(b,bool) else b)
        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)


    def test_coverage_transform(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_parsed()
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        stream_id, route = self.pubsub_management.create_stream('example', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)

        ingestion_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)

        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingestion_config_id, dataset_id=dataset_id)
        self.addCleanup(self.ingestion_management.unpersist_data_stream, stream_id, ingestion_config_id)
        publisher = StandaloneStreamPublisher(stream_id, route)
        
        rdt = ph.get_rdt(stream_def_id)
        ph.fill_parsed_rdt(rdt)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.event.wait(30))

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_array_almost_equal(rdt_out['time'], rdt['time'])
        np.testing.assert_array_almost_equal(rdt_out['temp'], rdt['temp'])

        np.testing.assert_array_almost_equal(rdt_out['conductivity_L1'], np.array([42.914]))
        np.testing.assert_array_almost_equal(rdt_out['temp_L1'], np.array([20.]))
        np.testing.assert_array_almost_equal(rdt_out['pressure_L1'], np.array([3.068]))
        np.testing.assert_array_almost_equal(rdt_out['density'], np.array([1021.7144739593881]))
        np.testing.assert_array_almost_equal(rdt_out['salinity'], np.array([30.935132729668283]))


    def test_qc_events(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_qc_pdict()
        stream_def_id = self.pubsub_management.create_stream_definition('qc stream def', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        stream_id, route = self.pubsub_management.create_stream('qc stream', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)

        ingestion_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        config = DotDict()

        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingestion_config_id, dataset_id=dataset_id, config=config)
        self.addCleanup(self.ingestion_management.unpersist_data_stream, stream_id, ingestion_config_id)

        publisher = StandaloneStreamPublisher(stream_id, route)
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10)
        rdt['temp'] = np.arange(10) * 3

        verified = Event()
        def verification(event, *args, **kwargs):
            self.assertEquals(event.qc_parameter, 'temp_qc')
            self.assertEquals(event.temporal_value, 7)
            verified.set()

        es = EventSubscriber(event_type=OT.ParameterQCEvent, origin=dataset_id, callback=verification, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())
        self.assertTrue(verified.wait(10))



    def test_lookup_values_ingest_replay(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_lookups()
        stream_def_id = self.pubsub_management.create_stream_definition('lookups', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        stream_id, route = self.pubsub_management.create_stream('example', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)

        ingestion_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        config = DotDict()
        config.process.lookup_docs = ['test1', 'test2']
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingestion_config_id, dataset_id=dataset_id, config=config)
        self.addCleanup(self.ingestion_management.unpersist_data_stream, stream_id, ingestion_config_id)

        stored_value_manager = StoredValueManager(self.container)
        stored_value_manager.stored_value_cas('test1',{'offset_a':10.0, 'offset_b':13.1})
        
        publisher = StandaloneStreamPublisher(stream_id, route)
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = [20.0] * 20

        granule = rdt.to_granule()

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        publisher.publish(granule)
        self.assertTrue(dataset_monitor.event.wait(30))
        
        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_array_almost_equal(rdt_out['time'], np.arange(20))
        np.testing.assert_array_almost_equal(rdt_out['temp'], np.array([20.] * 20))
        np.testing.assert_array_almost_equal(rdt_out['calibrated'], np.array([30.]*20))
        np.testing.assert_array_equal(rdt_out['offset_b'], np.array([rdt_out.fill_value('offset_b')] * 20))

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(20,40)
        rdt['temp'] = [20.0] * 20
        granule = rdt.to_granule()

        dataset_monitor.event.clear()

        stored_value_manager.stored_value_cas('test1',{'offset_a':20.0})
        stored_value_manager.stored_value_cas('coefficient_document',{'offset_b':10.0})
        gevent.sleep(2)

        publisher.publish(granule)
        self.assertTrue(dataset_monitor.event.wait(30))

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_array_almost_equal(rdt_out['time'], np.arange(40))
        np.testing.assert_array_almost_equal(rdt_out['temp'], np.array([20.] * 20 + [20.] * 20))
        np.testing.assert_array_equal(rdt_out['offset_b'], np.array([10.] * 40))
        np.testing.assert_array_almost_equal(rdt_out['calibrated'], np.array([30.]*20 + [40.]*20))
        np.testing.assert_array_almost_equal(rdt_out['calibrated_b'], np.array([40.] * 20 + [50.] * 20))
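
        # Reading the assertions above: the second batch (time 20..39) was
        # ingested after 'test1' changed (offset_a 10 -> 20) and after
        # 'coefficient_document' first supplied offset_b, so 'calibrated'
        # steps from 30 to 40 while 'offset_b' back-fills all 40 records
        # with 10 on replay.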



    @unittest.skip("Doesn't work")
    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_replay_pause(self):
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary',  param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context('binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context('records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary('replay_pdict', parameter_context_ids=context_ids, temporal_context='time')
        

        stream_def_id = self.pubsub_management.create_stream_definition('replay_stream', parameter_dictionary_id=pdict_id)
        replay_stream, replay_route = self.pubsub_management.create_stream('replay', 'xp1', stream_definition_id=stream_def_id)
        dataset_id = self.create_dataset(pdict_id)
        scov = DatasetManagementService._get_simplex_coverage(dataset_id)

        bb = CoverageCraft(scov)
        bb.rdt['time'] = np.arange(100)
        bb.rdt['temp'] = np.random.random(100) + 30
        bb.sync_with_granule()

        DatasetManagementService._persist_coverage(dataset_id, bb.coverage) # This invalidates it for multi-host configurations
        # Set up the subscriber to verify the data
        subscriber = StandaloneStreamSubscriber(self.exchange_space_name, self.validate_granule_subscription)
        xp = self.container.ex_manager.create_xp('xp1')
        self.queue_buffer.append(self.exchange_space_name)
        subscriber.start()
        subscriber.xn.bind(replay_route.routing_key, xp)

        # Set up the replay agent and the client wrapper

        # 1) Define the Replay (dataset and stream to publish on)
        self.replay_id, process_id = self.data_retriever.define_replay(dataset_id=dataset_id, stream_id=replay_stream)
        # 2) Make a client to interact with the process (optionally provide it a process to bind with)
        replay_client = ReplayClient(process_id)
        # 3) Start the agent (launch the process)
        self.data_retriever.start_replay_agent(self.replay_id)
        # 4) Start replaying...
        replay_client.start_replay()
        
        # Wait till we get some granules
        self.assertTrue(self.event.wait(5))
        
        # We got granules, pause the replay, clear the queue and allow the process to finish consuming
        replay_client.pause_replay()
        gevent.sleep(1)
        subscriber.xn.purge()
        self.event.clear()
        
        # Make sure there are no remaining messages being consumed
        self.assertFalse(self.event.wait(1))

        # Resume the replay and wait until we start getting granules again
        replay_client.resume_replay()
        self.assertTrue(self.event.wait(5))
    
        # Stop the replay, clear the queues
        replay_client.stop_replay()
        gevent.sleep(1)
        subscriber.xn.purge()
        self.event.clear()

        # Make sure that it did indeed stop
        self.assertFalse(self.event.wait(1))

        subscriber.stop()


    def test_retrieve_and_transform(self):
        # Make a simple dataset and start ingestion, pretty standard stuff.
        ctd_stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(ctd_stream_id, dataset_id)

        # Stream definition for the salinity data
        salinity_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        sal_stream_def_id = self.pubsub_management.create_stream_definition('sal data', parameter_dictionary_id=salinity_pdict_id)


        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10)
        rdt['temp'] = np.random.randn(10) * 10 + 30
        rdt['conductivity'] = np.random.randn(10) * 2 + 10
        rdt['pressure'] = np.random.randn(10) * 1 + 12

        publisher = StandaloneStreamPublisher(ctd_stream_id, route)
        publisher.publish(rdt.to_granule())

        rdt['time'] = np.arange(10,20)

        publisher.publish(rdt.to_granule())


        self.wait_until_we_have_enough_granules(dataset_id, 20)

        granule = self.data_retriever.retrieve(dataset_id, 
                                             None,
                                             None, 
                                             'ion.processes.data.transforms.ctd.ctd_L2_salinity',
                                             'CTDL2SalinityTransformAlgorithm', 
                                             kwargs=dict(params=sal_stream_def_id))
        rdt = RecordDictionaryTool.load_from_granule(granule)
        for i in rdt['salinity']:
            self.assertNotEquals(i,0)
        self.streams.append(ctd_stream_id)
        self.stop_ingestion(ctd_stream_id)

    def test_last_granule(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)

        self.publish_hifi(stream_id,route, 0)
        self.publish_hifi(stream_id,route, 1)
        

        self.wait_until_we_have_enough_granules(dataset_id,20) # I just need two


        def verifier():
            replay_granule = self.data_retriever.retrieve_last_data_points(dataset_id, 10)
            rdt = RecordDictionaryTool.load_from_granule(replay_granule)
            comp = rdt['time'] == np.arange(10) + 10
            if not isinstance(comp, bool):
                return comp.all()
            return False
        success = poll(verifier)

        self.assertTrue(success)

        def verify_points():
            replay_granule = self.data_retriever.retrieve_last_data_points(dataset_id, 5)
            rdt = RecordDictionaryTool.load_from_granule(replay_granule)
            comp = rdt['time'] == np.arange(15, 20)
            if not isinstance(comp, bool):
                return comp.all()
            return False
        success = poll(verify_points)

        self.assertTrue(success)
        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)

    def test_replay_with_parameters(self):
        #--------------------------------------------------------------------------------
        # Create the configurations and the dataset
        #--------------------------------------------------------------------------------
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary',  param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context('binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context('records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary('replay_pdict', parameter_context_ids=context_ids, temporal_context='time')
        

        stream_def_id = self.pubsub_management.create_stream_definition('replay_stream', parameter_dictionary_id=pdict_id)
        
        stream_id, route  = self.pubsub_management.create_stream('replay_with_params', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        config_id  = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id)

        dataset_monitor = DatasetMonitor(dataset_id)

        self.addCleanup(dataset_monitor.stop)

        self.publish_fake_data(stream_id, route)

        self.assertTrue(dataset_monitor.event.wait(30))

        query = {
            'start_time': 0 - 2208988800,
            'end_time':   20 - 2208988800,
            'stride_time' : 2,
            'parameters': ['time','temp']
        }
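        # The 2208988800 offset converts between the NTP and unix epochs (see
        # test_correct_time below); presumably retrieve() takes unix-era bounds
        # while the stored time values are treated as NTP seconds, so this
        # window selects times 0..20 of the published data.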
        retrieved_data = self.data_retriever.retrieve(dataset_id=dataset_id,query=query)

        rdt = RecordDictionaryTool.load_from_granule(retrieved_data)
        comp = np.arange(0,20,2) == rdt['time']
        self.assertTrue(comp.all(),'%s' % rdt.pretty_print())
        self.assertEquals(set(rdt.iterkeys()), set(['time','temp']))

        extents = self.dataset_management.dataset_extents(dataset_id=dataset_id, parameters=['time','temp'])
        self.assertTrue(extents['time']>=20)
        self.assertTrue(extents['temp']>=20)

        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)
        

    def test_repersist_data(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        self.publish_hifi(stream_id,route,0)
        self.publish_hifi(stream_id,route,1)
        self.wait_until_we_have_enough_granules(dataset_id,20)
        config_id = self.get_ingestion_config()
        self.ingestion_management.unpersist_data_stream(stream_id=stream_id,ingestion_configuration_id=config_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id,ingestion_configuration_id=config_id,dataset_id=dataset_id)
        self.publish_hifi(stream_id,route,2)
        self.publish_hifi(stream_id,route,3)
        self.wait_until_we_have_enough_granules(dataset_id,40)
        success = False
        with gevent.timeout.Timeout(5):
            while not success:

                replay_granule = self.data_retriever.retrieve(dataset_id)

                rdt = RecordDictionaryTool.load_from_granule(replay_granule)

                comp = rdt['time'] == np.arange(0,40)
                if not isinstance(comp,bool):
                    success = comp.all()
                gevent.sleep(1)

        self.assertTrue(success)
        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Host requires file-system access to coverage files, CEI mode does not support.')
    def test_correct_time(self):

        # There are 2208988800 seconds between Jan 1 1900 and Jan 1 1970, i.e. 
        #  the conversion factor between unix and NTP time
        unix_now = np.floor(time.time())
        ntp_now  = unix_now + 2208988800 

        unix_ago = unix_now - 20
        ntp_ago  = unix_ago + 2208988800

        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        coverage = DatasetManagementService._get_simplex_coverage(dataset_id)
        coverage.insert_timesteps(20)
        coverage.set_parameter_values('time', np.arange(ntp_ago,ntp_now))
        
        temporal_bounds = self.dataset_management.dataset_temporal_bounds(dataset_id)

        self.assertTrue( np.abs(temporal_bounds[0] - unix_ago) < 2)
        self.assertTrue( np.abs(temporal_bounds[1] - unix_now) < 2)


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Host requires file-system access to coverage files, CEI mode does not support.')
    def test_empty_coverage_time(self):

        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        coverage = DatasetManagementService._get_coverage(dataset_id)
        temporal_bounds = self.dataset_management.dataset_temporal_bounds(dataset_id)
        self.assertEquals([coverage.get_parameter_context('time').fill_value] *2, temporal_bounds)


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Host requires file-system access to coverage files, CEI mode does not support.')
    def test_out_of_band_retrieve(self):
        # Set up the environment
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        
        # Fill the dataset
        self.publish_fake_data(stream_id, route)
        self.wait_until_we_have_enough_granules(dataset_id,40)

        # Retrieve the data
        granule = DataRetrieverService.retrieve_oob(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        self.assertTrue((rdt['time'] == np.arange(40)).all())

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Host requires file-system access to coverage files, CEI mode does not support.')
    def test_retrieve_cache(self):
        DataRetrieverService._refresh_interval = 1
        datasets = [self.make_simple_dataset() for i in xrange(10)]
        for stream_id, route, stream_def_id, dataset_id in datasets:
            coverage = DatasetManagementService._get_simplex_coverage(dataset_id)
            coverage.insert_timesteps(10)
            coverage.set_parameter_values('time', np.arange(10))
            coverage.set_parameter_values('temp', np.arange(10))

        # Verify cache hit and refresh
        dataset_ids = [i[3] for i in datasets]
        self.assertTrue(dataset_ids[0] not in DataRetrieverService._retrieve_cache)
        DataRetrieverService._get_coverage(dataset_ids[0]) # Hit the cache
        cov, age = DataRetrieverService._retrieve_cache[dataset_ids[0]]
        # Verify that it was hit and it's now in there
        self.assertTrue(dataset_ids[0] in DataRetrieverService._retrieve_cache)

        gevent.sleep(DataRetrieverService._refresh_interval + 0.2)

        DataRetrieverService._get_coverage(dataset_ids[0]) # Hit the cache
        cov, age2 = DataRetrieverService._retrieve_cache[dataset_ids[0]]
        self.assertTrue(age2 != age)

        for dataset_id in dataset_ids:
            DataRetrieverService._get_coverage(dataset_id)
        
        self.assertTrue(dataset_ids[0] not in DataRetrieverService._retrieve_cache)

        stream_id, route, stream_def, dataset_id = datasets[0]
        self.start_ingestion(stream_id, dataset_id)
        DataRetrieverService._get_coverage(dataset_id)
        
        self.assertTrue(dataset_id in DataRetrieverService._retrieve_cache)

        DataRetrieverService._refresh_interval = 100
        self.publish_hifi(stream_id,route,1)
        self.wait_until_we_have_enough_granules(dataset_id, data_size=20)
            
 
        event = gevent.event.Event()
        with gevent.Timeout(20):
            while not event.wait(0.1):
                if dataset_id not in DataRetrieverService._retrieve_cache:
                    event.set()


        self.assertTrue(event.is_set())
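
        # Reading of this test: cached coverages age out after _refresh_interval
        # seconds, touching more datasets than the cache holds evicts the oldest
        # entries, and fresh ingestion on a dataset appears to invalidate its
        # entry even before the interval elapses.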

        
    def publish_and_wait(self, dataset_id, granule):
        stream_ids, _ = self.resource_registry.find_objects(dataset_id, PRED.hasStream,id_only=True)
        stream_id = stream_ids[0]
        route = self.pubsub_management.read_stream_route(stream_id)
        publisher = StandaloneStreamPublisher(stream_id,route)
        dataset_monitor = DatasetMonitor(dataset_id)
        publisher.publish(granule)
        self.assertTrue(dataset_monitor.event.wait(10))

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Host requires file-system access to coverage files, CEI mode does not support.')
    def test_thorough_gap_analysis(self):
        dataset_id = self.test_ingestion_gap_analysis()
        vcov = DatasetManagementService._get_coverage(dataset_id)

        self.assertIsInstance(vcov,ViewCoverage)
        ccov = vcov.reference_coverage

        self.assertIsInstance(ccov, ComplexCoverage)
        self.assertEquals(len(ccov._reference_covs), 3)


    def test_ingestion_gap_analysis(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        connection1 = uuid4().hex
        connection2 = uuid4().hex

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [0]
        rdt['temp'] = [0]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection1,connection_index='0'))
        rdt['time'] = [1]
        rdt['temp'] = [1]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection1,connection_index=1))
        rdt['time'] = [2]
        rdt['temp'] = [2]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection1,connection_index='3')) # Gap, missed message
        rdt['time'] = [3]
        rdt['temp'] = [3]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection2,connection_index='3')) # Gap, new connection
        rdt['time'] = [4]
        rdt['temp'] = [4]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection2,connection_index='4'))
        rdt['time'] = [5]
        rdt['temp'] = [5]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection2,connection_index=5))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(6))
        np.testing.assert_array_equal(rdt['temp'], np.arange(6))
        return dataset_id
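
    # Reading of the publishes above: connection1 skips index 2 (first gap) and
    # the switch to connection2 opens a second gap, so ingestion splits the data
    # into three simplex coverages, matching the len(ccov._reference_covs) == 3
    # that test_thorough_gap_analysis asserts.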


    @unittest.skip('Outdated due to ingestion retry')
    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Host requires file-system access to coverage files, CEI mode does not support.')
    def test_ingestion_failover(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        
        event = Event()

        def cb(*args, **kwargs):
            event.set()

        sub = EventSubscriber(event_type="ExceptionEvent", callback=cb, origin="stream_exception")
        sub.start()

        self.publish_fake_data(stream_id, route)
        self.wait_until_we_have_enough_granules(dataset_id, 40)
        
        file_path = DatasetManagementService._get_coverage_path(dataset_id)
        master_file = os.path.join(file_path, '%s_master.hdf5' % dataset_id)

        with open(master_file, 'w') as f:
            f.write('this will crash HDF')

        self.publish_hifi(stream_id, route, 5)


        self.assertTrue(event.wait(10))

        sub.stop()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Host requires file-system access to coverage files, CEI mode does not support.')
    def test_coverage_types(self):
        # Make a simple dataset and start ingestion, pretty standard stuff.
        ctd_stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        cov = DatasetManagementService._get_coverage(dataset_id=dataset_id)
        self.assertIsInstance(cov, ViewCoverage)

        cov = DatasetManagementService._get_simplex_coverage(dataset_id=dataset_id)
        self.assertIsInstance(cov, SimplexCoverage)
Example #13
class TestGranulePublish(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name + '_logger')
        producer_definition.executable = {
            'module': 'ion.processes.data.stream_granule_logger',
            'class': 'StreamGranuleLogger'
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(
            process_definition=producer_definition)
        configuration = {
            'process': {
                'stream_id': stream_id,
            }
        }
        pid = self.processdispatchclient.schedule_process(
            process_definition_id=logger_procdef_id,
            configuration=configuration)

        return pid
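
    # Usage sketch, mirroring the test below: spawn a logger process that
    # prints each granule arriving on a stream, then cancel it when finished.
    #
    #     pid = self.create_logger('ctd_parsed', stream_ids[0])
    #     ...
    #     self.processdispatchclient.cancel_process(pid)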

    # Overriding the trigger function here to use the new granule
    def test_granule_publish(self):
        log.debug("test_granule_publish ")
        self.loggerpids = []

        #retrieve the param dict from the repository
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        stream_definition_id = self.pubsubclient.create_stream_definition(
            'parsed stream', parameter_dictionary_id=pdict_id)

        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
                           name=str(uuid.uuid4()),
                           description='ctd stream test',
                           temporal_domain=tdom.dump(),
                           spatial_domain=sdom.dump())

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=stream_definition_id)

        # Retrieve the id of the output stream of the output Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        log.debug('test_granule_publish: Data product streams1 = %s',
                  stream_ids)

        pid = self.create_logger('ctd_parsed', stream_ids[0])
        self.loggerpids.append(pid)

        rdt = RecordDictionaryTool(stream_definition_id=stream_definition_id)

        #create the publisher from the stream route
        stream_route = self.pubsubclient.read_stream_route(stream_ids[0])
        publisher = StandaloneStreamPublisher(stream_ids[0], stream_route)

        # this is one sample from the ctd driver
        tomato = {
            "driver_timestamp": 3555971105.1268806,
            "instrument_id": "ABC-123",
            "pkt_format_id": "JSON_Data",
            "pkt_version": 1,
            "preferred_timestamp": "driver_timestamp",
            "quality_flag": "ok",
            "stream_name": "parsed",
            "values": [
                {"value": 22.9304, "value_id": "temp"},
                {"value": 51.57381, "value_id": "conductivity"},
                {"value": 915.551, "value_id": "pressure"},
            ],
        }

        for value in tomato['values']:
            log.debug(
                "test_granule_publish: Looping tomato values  value_id: %s    value: %s",
                value['value_id'], value['value'])

            if value['value_id'] in rdt:
                rdt[value['value_id']] = numpy.array([value['value']])
                log.debug(
                    "test_granule_publish: Added data item %s  value: %s",
                    value['value_id'], value['value'])

        g = rdt.to_granule()

        publisher.publish(g)

        gevent.sleep(3)

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

        #--------------------------------------------------------------------------------
        # Cleanup data products
        #--------------------------------------------------------------------------------
        dp_ids, _ = self.rrclient.find_resources(restype=RT.DataProduct,
                                                 id_only=True)

        for dp_id in dp_ids:
            self.dataproductclient.delete_data_product(dp_id)
Example #14
class TestResourceRegistry(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient(
            node=self.container.node)

    def test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(
            cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(
            cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object not based on most current version"))

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report errors
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(
            inst, headers={'ion-actor-id': str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid,
                                                             PRED.hasOwner,
                                                             RT.ActorIdentity,
                                                             id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def test_lifecycle(self):
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT_PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.PLAN)
        self.assertEquals(new_state, LCS.PLANNED_PRIVATE)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED_PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(
                rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.DEVELOP)
        self.assertEquals(new_state, LCS.DEVELOPED_PRIVATE)

        self.assertRaises(
            iex.BadRequest,
            self.resource_registry_service.execute_lifecycle_transition,
            resource_id=rid,
            transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(
            rid, LCS.INTEGRATED_PRIVATE)
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED_PRIVATE)

    def test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(
            actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(
            actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(
            user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(
            user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj_id,
                'bogustype')
        self.assertTrue(
            cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                "bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Subject id or rev not available")

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id or rev not available")

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message ==
                        "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(
            cm.exception.message ==
            "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Create a similar association to a specific revision
        # TODO: This is not a supported case so far
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R")

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None,
            True)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(
            RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(
            RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        obj_ret1 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        obj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(obj_ret1) == len(obj_ret2))
        self.assertTrue(obj_ret1[0] == obj_ret2[0])
        self.assertTrue(obj_ret1[1][0]._id == obj_ret2[1][0]._id)

        obj_ret3 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, True)
        obj_ret4 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, None, None, True)
        self.assertTrue(len(obj_ret3) == len(obj_ret4))
        self.assertTrue(obj_ret3[0] == obj_ret4[0])
        self.assertTrue(obj_ret3[1][0]._id == obj_ret4[1][0]._id)

        obj_ret5 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, False)
        obj_ret6 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, None, None, False)
        self.assertTrue(len(obj_ret5) == len(obj_ret6))
        self.assertTrue(obj_ret5[0][0]._id == obj_ret6[0][0]._id)
        self.assertTrue(obj_ret5[1][0]._id == obj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Delete one of the associations
        self.resource_registry_service.delete_association(assoc_id2)

        assoc = self.resource_registry_service.get_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(
                RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(
            cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(
            RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("InstrumentDevice", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentDevice, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentDevice, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

    def test_attach(self):
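        # The blob below is the raw bytes of a tiny PNG image, used as binary
        # attachment content throughout this test.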

        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Owner creation tests
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1)
        self.assertEquals(binary, att1.content)

        import base64
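        # ASCII attachments carry base64-encoded text; decoding the stored
        # content should round-trip back to the original binary.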
        att = Attachment(content=base64.encodestring(binary),
                         attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid,
                                                               id_only=False,
                                                               limit=1)
        self.assertEquals(atts[0].content, att1.content)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [])

    def test_read_mult(self):
        test_resource1_id, _ = self.resource_registry_service.create(
            Resource(name='test1'))
        test_resource2_id, _ = self.resource_registry_service.create(
            Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o, Resource)
            self.assertTrue(o._id in res_list)

    def test_find_associations_mult(self):
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(
            subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(
            subject=transform_id,
            object=pd_id,
            predicate=PRED.hasProcessDefinition)

        results, _ = self.resource_registry_service.find_associations_mult(
            subjects=[dp_id], id_only=True)
        self.assertTrue(results == [transform_id])

        results, _ = self.resource_registry_service.find_associations_mult(
            subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
Example #15
class NotificationWorker(TransformEventListener):
    """
    Instances of this class act as Notification Workers.
    """
    def on_init(self):
        self.user_info = {}
        self.resource_registry = ResourceRegistryServiceClient()
        self.q = gevent.queue.Queue()

        super(NotificationWorker, self).on_init()

    def test_hook(self, user_info, reverse_user_info):
        '''
        This method exists only to facilitate testing the reload of the user_info dictionary
        '''
        self.q.put((user_info, reverse_user_info))

    def on_start(self):
        super(NotificationWorker, self).on_start()

        self.reverse_user_info = None
        self.user_info = None

        #------------------------------------------------------------------------------------
        # Start by loading the user info and reverse user info dictionaries
        #------------------------------------------------------------------------------------

        try:
            self.user_info = self.load_user_info()
            self.reverse_user_info = calculate_reverse_user_info(
                self.user_info)

            log.debug(
                "On start up, notification workers loaded the following user_info dictionary: %s"
                % self.user_info)
            log.debug("The calculated reverse user info: %s" %
                      self.reverse_user_info)

        except NotFound as exc:
            if exc.message.find('users_index') > -1:
                log.warning(
                    "Notification workers found on start up that users_index have not been loaded yet."
                )
            else:
                raise NotFound(exc.message)

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            '''
            Callback method for the subscriber to ReloadUserInfoEvent
            '''

            notification_id = event_msg.notification_id
            log.debug(
                "Notification worker received a ReloadUserInfoEvent. The relevant notification_id is %s"
                % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info = calculate_reverse_user_info(
                self.user_info)
            self.test_hook(self.user_info, self.reverse_user_info)

            log.debug("After a reload, the user_info: %s" % self.user_info)
            log.debug("The recalculated reverse_user_info: %s" %
                      self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type=OT.ReloadUserInfoEvent,
            origin='UserNotificationService',
            callback=reload_user_info)

        self.add_endpoint(self.reload_user_info_subscriber)
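        # add_endpoint ties the subscriber's lifecycle to this process, so it
        # starts listening now and is torn down when the worker stops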

    def process_event(self, msg, headers):
        """
        Callback method for the subscriber listening for all events
        """
        #------------------------------------------------------------------------------------
        # From the reverse user info dict find out which users have subscribed to that event
        #------------------------------------------------------------------------------------

        user_ids = []
        if self.reverse_user_info:
            user_ids = check_user_notification_interest(
                event=msg, reverse_user_info=self.reverse_user_info)

            #log.debug("Notification worker found interested users %s" % user_ids)

        #------------------------------------------------------------------------------------
        # Send email to the users
        #------------------------------------------------------------------------------------

        for user_id in user_ids:
            msg_recipient = self.user_info[user_id]['user_contact'].email
            self.smtp_client = setting_up_smtp_client()
            send_email(event=msg,
                       msg_recipient=msg_recipient,
                       smtp_client=self.smtp_client,
                       rr_client=self.resource_registry)
            self.smtp_client.quit()

    def get_user_notifications(self, user_info_id=''):
        """
        Get the notification request objects that are subscribed to by the user

        @param user_info_id str

        @retval notifications list of NotificationRequest objects
        """
        notifications = []
        user_notif_req_objs, _ = self.resource_registry.find_objects(
            subject=user_info_id,
            predicate=PRED.hasNotification,
            object_type=RT.NotificationRequest,
            id_only=False)

        #log.debug("Got %s notifications, for the user: %s", len(user_notif_req_objs), user_info_id)

        for notif in user_notif_req_objs:
            # do not include notifications that have expired
            if notif.temporal_bounds.end_datetime == '':
                notifications.append(notif)

        return notifications

    def load_user_info(self):
        '''
        Method to load the user info dictionary used by the notification workers and the UNS

        @retval user_info dict
        '''

        users, _ = self.resource_registry.find_resources(restype=RT.UserInfo)

        user_info = {}

        if not users:
            return {}

        for user in users:
            notifications_disabled = False
            notifications_daily_digest = False

            notifications = self.get_user_notifications(user_info_id=user._id)

            for variable in user.variables:
                if type(variable) is dict and 'name' in variable:

                    if variable['name'] == 'notifications_daily_digest':
                        notifications_daily_digest = variable['value']

                    if variable['name'] == 'notifications_disabled':
                        notifications_disabled = variable['value']
                else:
                    log.warning(
                        'Invalid variables attribute on UserInfo instance. UserInfo: %s',
                        user)

            user_info[user._id] = {
                'user_contact': user.contact,
                'notifications': notifications,
                'notifications_daily_digest': notifications_daily_digest,
                'notifications_disabled': notifications_disabled
            }

        return user_info
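
The user_info dictionary built by load_user_info above is what process_event and the reload callback consume. Below is a minimal sketch of its shape; the ids and values are invented for illustration, but the field names come from the code above:

# Sketch only -- not part of the worker; the id and values are invented.
user_info = {
    'some_user_id': {
        'user_contact': None,          # user.contact from the UserInfo resource
        'notifications': [],           # non-expired NotificationRequest objects
        'notifications_daily_digest': False,
        'notifications_disabled': False,
    }
}
# calculate_reverse_user_info inverts this mapping so that process_event can ask
# check_user_notification_interest(event=msg, reverse_user_info=...) which users
# are interested in a given event, without scanning every user.

Example #16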
class TestOrgManagementServiceInt(IonIntegrationTestCase):
    def setUp(self):

        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2coi.yml")

        self.resource_registry = ResourceRegistryServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(node=self.container.node)

    def test_org_crud(self):

        with self.assertRaises(BadRequest) as br:
            self.org_management_service.create_org(
                IonObject("Org", {"name": "Test Facility", "org_governance_name": "Test Facility"})
            )
        self.assertTrue("can only contain alphanumeric and underscore characters" in br.exception.message)

        with self.assertRaises(BadRequest):
            self.org_management_service.create_org()

        org_obj = IonObject("Org", {"name": "Test Facility"})
        org_id = self.org_management_service.create_org(org_obj)
        self.assertNotEqual(org_id, None)

        org = None
        org = self.org_management_service.read_org(org_id)
        self.assertNotEqual(org, None)
        self.assertEqual(org.org_governance_name, "Test_Facility")

        # Check that the roles got associated to them
        role_list = self.org_management_service.find_org_roles(org_id)
        self.assertEqual(len(role_list), 2)

        with self.assertRaises(BadRequest):
            self.org_management_service.update_org()
        org.name = "Updated Test Facility"
        self.org_management_service.update_org(org)

        org = None
        org = self.org_management_service.read_org(org_id)
        self.assertNotEqual(org, None)
        self.assertEqual(org.name, "Updated Test Facility")
        self.assertEqual(org.org_governance_name, "Test_Facility")

        user_role = self.org_management_service.find_org_role_by_name(org_id, ORG_MANAGER_ROLE)
        self.assertNotEqual(user_role, None)

        self.org_management_service.remove_user_role(org_id, ORG_MANAGER_ROLE)
        with self.assertRaises(BadRequest) as cm:
            user_role = self.org_management_service.find_org_role_by_name(org_id, ORG_MANAGER_ROLE)
        self.assertIn("The User Role 'ORG_MANAGER' does not exist for this Org", cm.exception.message)

        with self.assertRaises(BadRequest):
            self.org_management_service.delete_org()
        self.org_management_service.delete_org(org_id)

        with self.assertRaises(NotFound) as cm:
            self.org_management_service.read_org(org_id)
        self.assertIn("does not exist", cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.org_management_service.delete_org(org_id)
        self.assertIn("does not exist", cm.exception.message)

    def test_org_affiliation(self):

        root_org = None
        root_org = self.org_management_service.find_org()
        self.assertNotEqual(root_org, None)

        org_obj = IonObject("Org", {"name": "TestFacility"})
        org_id = self.org_management_service.create_org(org_obj)
        self.assertNotEqual(org_id, None)

        ret = self.org_management_service.affiliate_org(root_org._id, org_id)
        self.assertTrue(ret)

        ret = self.org_management_service.unaffiliate_org(root_org._id, org_id)
        self.assertTrue(ret)

    def test_find_org_containers(self):

        root_org = None
        root_org = self.org_management_service.find_org()
        self.assertNotEqual(root_org, None)

        containers = self.org_management_service.find_org_containers(root_org._id)

        all_containers, _ = self.resource_registry.find_resources(restype=RT.CapabilityContainer, id_only=True)

        self.assertEqual(len(containers), len(all_containers))
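
The org CRUD test above implies a normalization rule: when org_governance_name is not supplied, it is derived from name by replacing spaces with underscores, and only alphanumeric and underscore characters are accepted. A hypothetical helper sketching that rule (not the service's actual implementation):

import re

def derive_governance_name(name):
    # Replace spaces with underscores, then validate the character set,
    # mirroring what the assertions in test_org_crud imply.
    candidate = name.replace(" ", "_")
    if not re.match(r"^[A-Za-z0-9_]+$", candidate):
        raise ValueError("can only contain alphanumeric and underscore characters")
    return candidate

assert derive_governance_name("Test Facility") == "Test_Facility"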
Example #17
class PubsubManagementIntTest(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.pubsub_management       = PubsubManagementServiceClient()
        self.resource_registry       = ResourceRegistryServiceClient()
        self.dataset_management      = DatasetManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

        self.pdicts = {}
        self.queue_cleanup = list()
        self.exchange_cleanup = list()
        self.context_ids = set()

    def tearDown(self):
        for queue in self.queue_cleanup:
            xn = self.container.ex_manager.create_xn_queue(queue)
            xn.delete()
        for exchange in self.exchange_cleanup:
            xp = self.container.ex_manager.create_xp(exchange)
            xp.delete()

        self.cleanup_contexts()
    
    def test_stream_def_crud(self):

        # Test Creation
        pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict')
        stream_definition_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict.identifier)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_definition_id)

        # Make sure there is an assoc
        self.assertTrue(self.resource_registry.find_associations(subject=stream_definition_id, predicate=PRED.hasParameterDictionary, object=pdict.identifier, id_only=True))

        # Test Reading
        stream_definition = self.pubsub_management.read_stream_definition(stream_definition_id)
        self.assertTrue(PubsubManagementService._compare_pdicts(pdict.dump(), stream_definition.parameter_dictionary))


        # Test comparisons
        in_stream_definition_id = self.pubsub_management.create_stream_definition('L0 products', parameter_dictionary_id=pdict.identifier, available_fields=['time','temp','conductivity','pressure'])
        self.addCleanup(self.pubsub_management.delete_stream_definition, in_stream_definition_id)

        out_stream_definition_id = in_stream_definition_id
        self.assertTrue(self.pubsub_management.compare_stream_definition(in_stream_definition_id, out_stream_definition_id))
        self.assertTrue(self.pubsub_management.compatible_stream_definitions(in_stream_definition_id, out_stream_definition_id))

        out_stream_definition_id = self.pubsub_management.create_stream_definition('L2 Products', parameter_dictionary_id=pdict.identifier, available_fields=['time','salinity','density'])
        self.addCleanup(self.pubsub_management.delete_stream_definition, out_stream_definition_id)
        self.assertFalse(self.pubsub_management.compare_stream_definition(in_stream_definition_id, out_stream_definition_id))
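        # Identical ids compare equal; differing available_fields make
        # compare_stream_definition fail, while compatible_stream_definitions
        # still passes for stream definitions sharing a parameter dictionary.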

        self.assertTrue(self.pubsub_management.compatible_stream_definitions(in_stream_definition_id, out_stream_definition_id))

    @unittest.skip('Needs to be refactored for cleanup')
    def test_validate_stream_defs(self):
        self.addCleanup(self.cleanup_contexts)
        #test no input 
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = []
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_0', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_0', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)
    
        #test input with no output
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_1', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_1', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)
        
        #test available field missing parameter context definition -- missing PRESWAT_L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['DENSITY']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_2', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_2', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test l1 from l0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_3', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_3', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test l2 from l0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1', 'DENSITY', 'PRACSAL'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_4', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_4', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)
        
        #test Ln from L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY','PRACSAL','TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_5', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_5', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)
        
        #test L2 from L1
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        outgoing_pdict_id = self._get_pdict(['DENSITY','PRACSAL','TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_6', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_6', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)
        
        #test L1 from L0 missing L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON'])
        outgoing_pdict_id = self._get_pdict(['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_7', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_7', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)
        
        #test L2 from L0 missing L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_8', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_8', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)
        
        #test L2 from L0 missing L1
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_9', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_9', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)
    
    def publish_on_stream(self, stream_id, msg):
        stream = self.pubsub_management.read_stream(stream_id)
        stream_route = stream.stream_route
        publisher = StandaloneStreamPublisher(stream_id=stream_id, stream_route=stream_route)
        publisher.publish(msg)

    def test_stream_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition('test_definition', stream_type='stream')
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
        topic_id = self.pubsub_management.create_topic(name='test_topic', exchange_point='test_exchange')
        self.addCleanup(self.pubsub_management.delete_topic, topic_id)
        self.exchange_cleanup.append('test_exchange')
        topic2_id = self.pubsub_management.create_topic(name='another_topic', exchange_point='outside')
        self.addCleanup(self.pubsub_management.delete_topic, topic2_id)
        stream_id, route = self.pubsub_management.create_stream(name='test_stream', topic_ids=[topic_id, topic2_id], exchange_point='test_exchange', stream_definition_id=stream_def_id)

        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertEquals(topics,[topic_id])

        defs, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True)
        self.assertTrue(len(defs))

        stream = self.pubsub_management.read_stream(stream_id)
        self.assertEquals(stream.name,'test_stream')
        self.pubsub_management.delete_stream(stream_id)
        
        with self.assertRaises(NotFound):
            self.pubsub_management.read_stream(stream_id)

        defs, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True)
        self.assertFalse(len(defs))

        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertFalse(len(topics))



    def test_data_product_subscription(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        tdom, sdom = time_series_domain()
        dp = DataProduct(name='ctd parsed')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(data_product=dp, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

        subscription_id = self.pubsub_management.create_subscription('validator', data_product_ids=[data_product_id])
        self.addCleanup(self.pubsub_management.delete_subscription, subscription_id)

        validated = Event()
        def validation(msg, route, stream_id):
            validated.set()

        stream_ids, _ = self.resource_registry.find_objects(subject=data_product_id, predicate=PRED.hasStream, id_only=True)
        dp_stream_id = stream_ids.pop()

        validator = StandaloneStreamSubscriber('validator', callback=validation)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_management.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_management.deactivate_subscription, subscription_id)

        route = self.pubsub_management.read_stream_route(dp_stream_id)

        publisher = StandaloneStreamPublisher(dp_stream_id, route)
        publisher.publish('hi')
        self.assertTrue(validated.wait(10))
            

    def test_subscription_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition('test_definition', stream_type='stream')
        stream_id, route = self.pubsub_management.create_stream(name='test_stream', exchange_point='test_exchange', stream_definition_id=stream_def_id)
        subscription_id = self.pubsub_management.create_subscription(name='test subscription', stream_ids=[stream_id], exchange_name='test_queue')
        self.exchange_cleanup.append('test_exchange')

        subs, assocs = self.resource_registry.find_objects(subject=subscription_id,predicate=PRED.hasStream,id_only=True)
        self.assertEquals(subs,[stream_id])

        res, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='test_queue', id_only=True)
        self.assertEquals(len(res),1)

        subs, assocs = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertEquals(subs[0], res[0])

        subscription = self.pubsub_management.read_subscription(subscription_id)
        self.assertEquals(subscription.exchange_name, 'test_queue')

        self.pubsub_management.delete_subscription(subscription_id)
        
        subs, assocs = self.resource_registry.find_objects(subject=subscription_id,predicate=PRED.hasStream,id_only=True)
        self.assertFalse(len(subs))

        subs, assocs = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertFalse(len(subs))


        self.pubsub_management.delete_stream(stream_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_move_before_activate(self):
        stream_id, route = self.pubsub_management.create_stream(name='test_stream', exchange_point='test_xp')

        #--------------------------------------------------------------------------------
        # Test moving before activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription('first_queue', stream_ids=[stream_id])

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.pubsub_management.move_subscription(subscription_id, exchange_name='second_queue')

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)

        self.assertEquals(len(subjects),1)
        self.assertEquals(subjects[0], xn_ids[0])

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_move_activated_subscription(self):

        stream_id, route = self.pubsub_management.create_stream(name='test_stream', exchange_point='test_xp')
        #--------------------------------------------------------------------------------
        # Test moving after activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription('first_queue', stream_ids=[stream_id])
        self.pubsub_management.activate_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.verified = Event()

        def verify(m,r,s):
            self.assertEquals(m,'verified')
            self.verified.set()

        subscriber = StandaloneStreamSubscriber('second_queue', verify)
        subscriber.start()

        self.pubsub_management.move_subscription(subscription_id, exchange_name='second_queue')

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)

        self.assertEquals(len(subjects),1)
        self.assertEquals(subjects[0], xn_ids[0])

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish('verified')

        self.assertTrue(self.verified.wait(2))

        self.pubsub_management.deactivate_subscription(subscription_id)

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_queue_cleanup(self):
        stream_id, route = self.pubsub_management.create_stream('test_stream','xp1')
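        # clear out any leftover 'queue1' broker queues from earlier runs so
        # the resource count below is deterministic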
        xn_objs, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='queue1')
        for xn_obj in xn_objs:
            xn = self.container.ex_manager.create_xn_queue(xn_obj.name)
            xn.delete()
        subscription_id = self.pubsub_management.create_subscription('queue1',stream_ids=[stream_id])
        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids),1)

        self.pubsub_management.delete_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids),0)

    def test_activation_and_deactivation(self):
        stream_id, route = self.pubsub_management.create_stream('stream1','xp1')
        subscription_id = self.pubsub_management.create_subscription('sub1', stream_ids=[stream_id])

        self.check1 = Event()

        def verifier(m,r,s):
            self.check1.set()


        subscriber = StandaloneStreamSubscriber('sub1',verifier)
        subscriber.start()

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish('should not receive')

        self.assertFalse(self.check1.wait(0.25))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        self.check1.clear()
        self.assertFalse(self.check1.is_set())

        self.pubsub_management.deactivate_subscription(subscription_id)

        publisher.publish('should not receive')
        self.assertFalse(self.check1.wait(0.5))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        subscriber.stop()

        self.pubsub_management.deactivate_subscription(subscription_id)
        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

        

    def test_topic_crud(self):

        topic_id = self.pubsub_management.create_topic(name='test_topic', exchange_point='test_xp')
        self.exchange_cleanup.append('test_xp')

        topic = self.pubsub_management.read_topic(topic_id)

        self.assertEquals(topic.name,'test_topic')
        self.assertEquals(topic.exchange_point, 'test_xp')

        self.pubsub_management.delete_topic(topic_id)
        with self.assertRaises(NotFound):
            self.pubsub_management.read_topic(topic_id)

    def test_full_pubsub(self):

        self.sub1_sat = Event()
        self.sub2_sat = Event()

        def subscriber1(m,r,s):
            self.sub1_sat.set()

        def subscriber2(m,r,s):
            self.sub2_sat.set()

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        sub1.start()
        self.addCleanup(sub1.stop)

        sub2 = StandaloneStreamSubscriber('sub2', subscriber2)
        sub2.start()
        self.addCleanup(sub2.stop)

        log_topic = self.pubsub_management.create_topic('instrument_logs', exchange_point='instruments')
        self.addCleanup(self.pubsub_management.delete_topic, log_topic)
        science_topic = self.pubsub_management.create_topic('science_data', exchange_point='instruments')
        self.addCleanup(self.pubsub_management.delete_topic, science_topic)
        events_topic = self.pubsub_management.create_topic('notifications', exchange_point='events')
        self.addCleanup(self.pubsub_management.delete_topic, events_topic)


        log_stream, route = self.pubsub_management.create_stream('instrument1-logs', topic_ids=[log_topic], exchange_point='instruments')
        self.addCleanup(self.pubsub_management.delete_stream, log_stream)
        ctd_stream, route = self.pubsub_management.create_stream('instrument1-ctd', topic_ids=[science_topic], exchange_point='instruments')
        self.addCleanup(self.pubsub_management.delete_stream, ctd_stream)
        event_stream, route = self.pubsub_management.create_stream('notifications', topic_ids=[events_topic], exchange_point='events')
        self.addCleanup(self.pubsub_management.delete_stream, event_stream)
        raw_stream, route = self.pubsub_management.create_stream('temp', exchange_point='global.data')
        self.addCleanup(self.pubsub_management.delete_stream, raw_stream)


        subscription1 = self.pubsub_management.create_subscription('subscription1', stream_ids=[log_stream,event_stream], exchange_name='sub1')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription1)
        subscription2 = self.pubsub_management.create_subscription('subscription2', exchange_points=['global.data'], stream_ids=[ctd_stream], exchange_name='sub2')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription2)

        self.pubsub_management.activate_subscription(subscription1)
        self.addCleanup(self.pubsub_management.deactivate_subscription, subscription1)
        self.pubsub_management.activate_subscription(subscription2)
        self.addCleanup(self.pubsub_management.deactivate_subscription, subscription2)

        self.publish_on_stream(log_stream, 1)
        self.assertTrue(self.sub1_sat.wait(4))
        self.assertFalse(self.sub2_sat.is_set())

        self.publish_on_stream(raw_stream, 1)
        self.assertTrue(self.sub2_sat.wait(4))
    
    def test_topic_craziness(self):

        self.msg_queue = Queue()

        def subscriber1(m,r,s):
            self.msg_queue.put(m)

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        sub1.start()
        self.addCleanup(sub1.stop)

        topic1 = self.pubsub_management.create_topic('topic1', exchange_point='xp1')
        self.addCleanup(self.pubsub_management.delete_topic, topic1)
        topic2 = self.pubsub_management.create_topic('topic2', exchange_point='xp1', parent_topic_id=topic1)
        self.addCleanup(self.pubsub_management.delete_topic, topic2)
        topic3 = self.pubsub_management.create_topic('topic3', exchange_point='xp1', parent_topic_id=topic1)
        self.addCleanup(self.pubsub_management.delete_topic, topic3)
        topic4 = self.pubsub_management.create_topic('topic4', exchange_point='xp1', parent_topic_id=topic2)
        self.addCleanup(self.pubsub_management.delete_topic, topic4)
        topic5 = self.pubsub_management.create_topic('topic5', exchange_point='xp1', parent_topic_id=topic2)
        self.addCleanup(self.pubsub_management.delete_topic, topic5)
        topic6 = self.pubsub_management.create_topic('topic6', exchange_point='xp1', parent_topic_id=topic3)
        self.addCleanup(self.pubsub_management.delete_topic, topic6)
        topic7 = self.pubsub_management.create_topic('topic7', exchange_point='xp1', parent_topic_id=topic3)
        self.addCleanup(self.pubsub_management.delete_topic, topic7)

        # Tree 2
        topic8 = self.pubsub_management.create_topic('topic8', exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_topic, topic8)
        topic9 = self.pubsub_management.create_topic('topic9', exchange_point='xp2', parent_topic_id=topic8)
        self.addCleanup(self.pubsub_management.delete_topic, topic9)
        topic10 = self.pubsub_management.create_topic('topic10', exchange_point='xp2', parent_topic_id=topic9)
        self.addCleanup(self.pubsub_management.delete_topic, topic10)
        topic11 = self.pubsub_management.create_topic('topic11', exchange_point='xp2', parent_topic_id=topic9)
        self.addCleanup(self.pubsub_management.delete_topic, topic11)
        topic12 = self.pubsub_management.create_topic('topic12', exchange_point='xp2', parent_topic_id=topic11)
        self.addCleanup(self.pubsub_management.delete_topic, topic12)
        topic13 = self.pubsub_management.create_topic('topic13', exchange_point='xp2', parent_topic_id=topic11)
        self.addCleanup(self.pubsub_management.delete_topic, topic13)
        self.exchange_cleanup.extend(['xp1','xp2'])
        
        stream1_id, route = self.pubsub_management.create_stream('stream1', topic_ids=[topic7, topic4, topic5], exchange_point='xp1')
        self.addCleanup(self.pubsub_management.delete_stream, stream1_id)
        stream2_id, route = self.pubsub_management.create_stream('stream2', topic_ids=[topic8], exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_stream, stream2_id)
        stream3_id, route = self.pubsub_management.create_stream('stream3', topic_ids=[topic10,topic13], exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_stream, stream3_id)
        stream4_id, route = self.pubsub_management.create_stream('stream4', topic_ids=[topic9], exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_stream, stream4_id)
        stream5_id, route = self.pubsub_management.create_stream('stream5', topic_ids=[topic11], exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_stream, stream5_id)

        subscription1 = self.pubsub_management.create_subscription('sub1', topic_ids=[topic1])
        self.addCleanup(self.pubsub_management.delete_subscription, subscription1)
        subscription2 = self.pubsub_management.create_subscription('sub2', topic_ids=[topic8], exchange_name='sub1')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription2)
        subscription3 = self.pubsub_management.create_subscription('sub3', topic_ids=[topic9], exchange_name='sub1')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription3)
        subscription4 = self.pubsub_management.create_subscription('sub4', topic_ids=[topic10,topic13, topic11], exchange_name='sub1')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription4)
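        # The blocks below exercise topic-tree routing: a subscription to a
        # topic receives messages published on streams attached to that topic
        # or any of its descendant topics, but not on streams attached to its
        # ancestors (e.g. sub3 on topic9 sees stream3 on topic10/topic13 but
        # not stream2 on topic8).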
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription1)

        self.publish_on_stream(stream1_id,1)

        self.assertEquals(self.msg_queue.get(timeout=10), 1)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)


        self.pubsub_management.deactivate_subscription(subscription1)
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription2)
        
        self.publish_on_stream(stream2_id,2)
        self.assertEquals(self.msg_queue.get(timeout=10), 2)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription2)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription3)

        self.publish_on_stream(stream2_id, 3)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream3_id, 4)
        self.assertEquals(self.msg_queue.get(timeout=10),4)


        self.pubsub_management.deactivate_subscription(subscription3)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription4)

        self.publish_on_stream(stream4_id, 5)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream5_id, 6)
        self.assertEquals(self.msg_queue.get(timeout=10),6)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.pubsub_management.deactivate_subscription(subscription4)
        
        #--------------------------------------------------------------------------------
    
    def cleanup_contexts(self):
        for context_id in self.context_ids:
            self.dataset_management.delete_parameter_context(context_id)

    def add_context_to_cleanup(self, context_id):
        self.context_ids.add(context_id)

    def _get_pdict(self, filter_values):
        t_ctxt = ParameterContext('TIME', param_type=QuantityType(value_encoding=np.dtype('int64')))
        t_ctxt.uom = 'seconds since 01-01-1900'
        t_ctxt_id = self.dataset_management.create_parameter_context(name='TIME', parameter_context=t_ctxt.dump(), parameter_type='quantity<int64>', units=t_ctxt.uom)
        self.add_context_to_cleanup(t_ctxt_id)

        lat_ctxt = ParameterContext('LAT', param_type=ConstantType(QuantityType(value_encoding=np.dtype('float32'))), fill_value=-9999)
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt_id = self.dataset_management.create_parameter_context(name='LAT', parameter_context=lat_ctxt.dump(), parameter_type='quantity<float32>', units=lat_ctxt.uom)
        self.add_context_to_cleanup(lat_ctxt_id)


        lon_ctxt = ParameterContext('LON', param_type=ConstantType(QuantityType(value_encoding=np.dtype('float32'))), fill_value=-9999)
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt_id = self.dataset_management.create_parameter_context(name='LON', parameter_context=lon_ctxt.dump(), parameter_type='quantity<float32>', units=lon_ctxt.uom)
        self.add_context_to_cleanup(lon_ctxt_id)


        # Independent Parameters
        # Temperature - values expected to be the decimal results of conversion from hex
        temp_ctxt = ParameterContext('TEMPWAT_L0', param_type=QuantityType(value_encoding=np.dtype('float32')), fill_value=-9999)
        temp_ctxt.uom = 'deg_C'
        temp_ctxt_id = self.dataset_management.create_parameter_context(name='TEMPWAT_L0', parameter_context=temp_ctxt.dump(), parameter_type='quantity<float32>', units=temp_ctxt.uom)
        self.add_context_to_cleanup(temp_ctxt_id)


        # Conductivity - values expected to be the decimal results of conversion from hex
        cond_ctxt = ParameterContext('CONDWAT_L0', param_type=QuantityType(value_encoding=np.dtype('float32')), fill_value=-9999)
        cond_ctxt.uom = 'S m-1'
        cond_ctxt_id = self.dataset_management.create_parameter_context(name='CONDWAT_L0', parameter_context=cond_ctxt.dump(), parameter_type='quantity<float32>', units=cond_ctxt.uom)
        self.add_context_to_cleanup(cond_ctxt_id)


        # Pressure - values expected to be the decimal results of conversion from hex
        press_ctxt = ParameterContext('PRESWAT_L0', param_type=QuantityType(value_encoding=np.dtype('float32')), fill_value=-9999)
        press_ctxt.uom = 'dbar'
        press_ctxt_id = self.dataset_management.create_parameter_context(name='PRESWAT_L0', parameter_context=press_ctxt.dump(), parameter_type='quantity<float32>', units=press_ctxt.uom)
        self.add_context_to_cleanup(press_ctxt_id)


        # Dependent Parameters

        # TEMPWAT_L1 = (TEMPWAT_L0 / 10000) - 10
        tl1_func = '(T / 10000) - 10'
        tl1_pmap = {'T': 'TEMPWAT_L0'}
        expr = NumexprFunction('TEMPWAT_L1', tl1_func, ['T'], param_map=tl1_pmap)
        tempL1_ctxt = ParameterContext('TEMPWAT_L1', param_type=ParameterFunctionType(function=expr), variability=VariabilityEnum.TEMPORAL)
        tempL1_ctxt.uom = 'deg_C'
        tempL1_ctxt_id = self.dataset_management.create_parameter_context(name=tempL1_ctxt.name, parameter_context=tempL1_ctxt.dump(), parameter_type='pfunc', units=tempL1_ctxt.uom)
        self.add_context_to_cleanup(tempL1_ctxt_id)


        # CONDWAT_L1 = (CONDWAT_L0 / 100000) - 0.5
        cl1_func = '(C / 100000) - 0.5'
        cl1_pmap = {'C': 'CONDWAT_L0'}
        expr = NumexprFunction('CONDWAT_L1', cl1_func, ['C'], param_map=cl1_pmap)
        condL1_ctxt = ParameterContext('CONDWAT_L1', param_type=ParameterFunctionType(function=expr), variability=VariabilityEnum.TEMPORAL)
        condL1_ctxt.uom = 'S m-1'
        condL1_ctxt_id = self.dataset_management.create_parameter_context(name=condL1_ctxt.name, parameter_context=condL1_ctxt.dump(), parameter_type='pfunc', units=condL1_ctxt.uom)
        self.add_context_to_cleanup(condL1_ctxt_id)


        # Equation uses p_range, which is a calibration coefficient - Fixing to 679.34040721
        #   PRESWAT_L1 = (PRESWAT_L0 * p_range / (0.85 * 65536)) - (0.05 * p_range)
        pl1_func = '(P * p_range / (0.85 * 65536)) - (0.05 * p_range)'
        pl1_pmap = {'P': 'PRESWAT_L0', 'p_range': 679.34040721}
        expr = NumexprFunction('PRESWAT_L1', pl1_func, ['P', 'p_range'], param_map=pl1_pmap)
        presL1_ctxt = ParameterContext('PRESWAT_L1', param_type=ParameterFunctionType(function=expr), variability=VariabilityEnum.TEMPORAL)
        presL1_ctxt.uom = 'dbar'
        presL1_ctxt_id = self.dataset_management.create_parameter_context(name=presL1_ctxt.name, parameter_context=presL1_ctxt.dump(), parameter_type='pfunc', units=presL1_ctxt.uom)
        self.add_context_to_cleanup(presL1_ctxt_id)
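
        # Quick sanity check of the L0 -> L1 conversions above (plain Python,
        # illustrative raw counts only):
        #
        #   280000 / 10000.0 - 10       # TEMPWAT_L1 -> 18.0 deg_C
        #   500000 / 100000.0 - 0.5     # CONDWAT_L1 -> 4.5 S m-1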


        # Density & practical salinity calculated using the Gibbs Seawater library - available via the python-gsw project:
        #       https://code.google.com/p/python-gsw/ & http://pypi.python.org/pypi/gsw/3.0.1

        # PRACSAL = gsw.SP_from_C((CONDWAT_L1 * 10), TEMPWAT_L1, PRESWAT_L1)
        owner = 'gsw'
        sal_func = 'SP_from_C'
        sal_arglist = ['C', 't', 'p']
        sal_pmap = {'C': NumexprFunction('CONDWAT_L1*10', 'C*10', ['C'], param_map={'C': 'CONDWAT_L1'}), 't': 'TEMPWAT_L1', 'p': 'PRESWAT_L1'}
        sal_kwargmap = None
        expr = PythonFunction('PRACSAL', owner, sal_func, sal_arglist, sal_kwargmap, sal_pmap)
        sal_ctxt = ParameterContext('PRACSAL', param_type=ParameterFunctionType(expr), variability=VariabilityEnum.TEMPORAL)
        sal_ctxt.uom = 'g kg-1'
        sal_ctxt_id = self.dataset_management.create_parameter_context(name=sal_ctxt.name, parameter_context=sal_ctxt.dump(), parameter_type='pfunc', units=sal_ctxt.uom)
        self.add_context_to_cleanup(sal_ctxt_id)


        # absolute_salinity = gsw.SA_from_SP(PRACSAL, PRESWAT_L1, longitude, latitude)
        # conservative_temperature = gsw.CT_from_t(absolute_salinity, TEMPWAT_L1, PRESWAT_L1)
        # DENSITY = gsw.rho(absolute_salinity, conservative_temperature, PRESWAT_L1)
        owner = 'gsw'
        abs_sal_expr = PythonFunction('abs_sal', owner, 'SA_from_SP', ['PRACSAL', 'PRESWAT_L1', 'LON','LAT'])
        cons_temp_expr = PythonFunction('cons_temp', owner, 'CT_from_t', [abs_sal_expr, 'TEMPWAT_L1', 'PRESWAT_L1'])
        dens_expr = PythonFunction('DENSITY', owner, 'rho', [abs_sal_expr, cons_temp_expr, 'PRESWAT_L1'])
        dens_ctxt = ParameterContext('DENSITY', param_type=ParameterFunctionType(dens_expr), variability=VariabilityEnum.TEMPORAL)
        dens_ctxt.uom = 'kg m-3'
        dens_ctxt_id = self.dataset_management.create_parameter_context(name=dens_ctxt.name, parameter_context=dens_ctxt.dump(), parameter_type='pfunc', units=dens_ctxt.uom)
        self.add_context_to_cleanup(dens_ctxt_id)
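
        # For reference, the equivalent call chain in plain Python against the
        # python-gsw package linked above (an illustrative sketch; the lowercase
        # variable names are hypothetical stand-ins for the parameter values):
        #
        #   import gsw
        #   SP  = gsw.SP_from_C(condwat_l1 * 10, tempwat_l1, preswat_l1)
        #   SA  = gsw.SA_from_SP(SP, preswat_l1, lon, lat)
        #   CT  = gsw.CT_from_t(SA, tempwat_l1, preswat_l1)
        #   rho = gsw.rho(SA, CT, preswat_l1)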

        
        ids = [t_ctxt_id, lat_ctxt_id, lon_ctxt_id, temp_ctxt_id, cond_ctxt_id, press_ctxt_id, tempL1_ctxt_id, condL1_ctxt_id, presL1_ctxt_id, sal_ctxt_id, dens_ctxt_id]
        contexts = [t_ctxt, lat_ctxt, lon_ctxt, temp_ctxt, cond_ctxt, press_ctxt, tempL1_ctxt, condL1_ctxt, presL1_ctxt, sal_ctxt, dens_ctxt]
        context_ids = [ids[i] for i,ctxt in enumerate(contexts) if ctxt.name in filter_values]
        pdict_name = '_'.join([ctxt.name for ctxt in contexts if ctxt.name in filter_values])
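        # For example (hypothetical call): self._get_pdict(['TIME', 'TEMPWAT_L0',
        # 'TEMPWAT_L1']) selects those three contexts and yields the dictionary
        # name 'TIME_TEMPWAT_L0_TEMPWAT_L1'.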

        if pdict_name in self.pdicts:
            return self.pdicts[pdict_name]

        pdict_id = self.dataset_management.create_parameter_dictionary(pdict_name, parameter_context_ids=context_ids, temporal_context='time')
        self.addCleanup(self.dataset_management.delete_parameter_dictionary, pdict_id)
        self.pdicts[pdict_name] = pdict_id
        return pdict_id
Example #18
class TestWorkflowManagementIntegration(VisualizationIntegrationTestHelper):

    def setUp(self):
        # Start container

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.workflowclient = WorkflowManagementServiceClient(node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)
        self.data_retriever = DataRetrieverServiceClient(node=self.container.node)

        self.ctd_stream_def = SBE37_CDM_stream_definition()



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_SA_transform_components(self):

        assertions = self.assertTrue

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)


        ###
        ###  Setup the first transformation
        ###

        # Salinity: Data Process Definition
        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()

        l2_salinity_all_data_process_id, ctd_l2_salinity_output_dp_id = self.create_transform_process(ctd_L2_salinity_dprocdef_id,ctd_parsed_data_product_id )

        ## get the stream id for the transform outputs
        stream_ids, _ = self.rrclient.find_objects(ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        sal_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_stream_id)


        ###
        ###  Setup the second transformation
        ###

        # Salinity Doubler: Data Process Definition
        salinity_doubler_dprocdef_id = self.create_salinity_doubler_data_process_definition()

        salinity_double_data_process_id, salinity_doubler_output_dp_id = self.create_transform_process(salinity_doubler_dprocdef_id, ctd_l2_salinity_output_dp_id )

        stream_ids, _ = self.rrclient.find_objects(salinity_doubler_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        sal_dbl_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_dbl_stream_id)


        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the transform processes
        self.dataprocessclient.deactivate_data_process(salinity_double_data_process_id)
        self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id)

        #Validate the data from each of the messages along the way
        self.validate_messages(results)

    @attr('LOCOINT')
    @attr('SMOKE')
    @unittest.skip("not working")
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Salinity_Test_Workflow',description='tests a workflow of multiple transform data processes')

        workflow_data_product_name = 'TEST-Workflow_Output_Product' #Set a specific output product name

        #Add a transformation process definition
        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=ctd_L2_salinity_dprocdef_id, persist_process_output_data=False)  #Don't persist the intermediate data product
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Add a transformation process definition
        salinity_doubler_dprocdef_id = self.create_salinity_doubler_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=salinity_doubler_dprocdef_id, output_data_product_name=workflow_data_product_name)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        aids = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aids) == 2 )

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=30)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #persist the output product
        #self.dataproductclient.activate_data_product_persistence(workflow_product_id)
        dataset_ids,_ = self.rrclient.find_objects(workflow_product_id, PRED.hasDataset, RT.DataSet, True)
        assertions(len(dataset_ids) == 1 )
        dataset_id = dataset_ids[0]

        #Verify the output data product name matches what was specified in the workflow definition
        workflow_product = self.rrclient.read(workflow_product_id)
        assertions(workflow_product.name == workflow_data_product_name)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 2 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id, False, timeout=15)  # Should test true at some point

        #Make sure the Workflow object was removed
        objs, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(objs) == 0)

        #Validate the data from each of the messages along the way
        self.validate_messages(results)

        #validate that the data was persisted and can be retrieved
        self.validate_data_ingest_retrieve(dataset_id)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_google_dt_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='GoogleDT_Test_Workflow',description='Tests the workflow of converting stream data to Google DT')

        #Add a transformation process definition
        google_dt_procdef_id = self.create_google_dt_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=google_dt_procdef_id, persist_process_output_data=True)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=20)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id, False)  # Should test true at some point

        #Validate the data from each of the messages along the way
        self.validate_google_dt_transform_results(results)

        # Check to see if ingestion worked. Extract the granules from data_retrieval.
        # First find the dataset associated with the output dp product
        ds_ids,_ = self.rrclient.find_objects(workflow_dp_ids[-1], PRED.hasDataset, RT.DataSet, True)
        retrieve_granule = self.data_retriever.retrieve(ds_ids[0])

        #Validate the data from each of the messages along the way
        self.validate_google_dt_transform_results(retrieve_granule)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_mpl_graphs_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Mpl_Graphs_Test_Workflow',description='Tests the workflow of converting stream data to Matplotlib graphs')

        #Add a transformation process definition
        mpl_graphs_procdef_id = self.create_mpl_graphs_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=mpl_graphs_procdef_id, persist_process_output_data=True)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=20)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id, False)  # Should test true at some point

        #Validate the data from each of the messages along the way
        self.validate_mpl_graphs_transform_results(results)

        # Check to see if ingestion worked. Extract the granules from data_retrieval.
        # First find the dataset associated with the output dp product
        ds_ids,_ = self.rrclient.find_objects(workflow_dp_ids[-1], PRED.hasDataset, RT.DataSet, True)

        retrieve_granule = self.data_retriever.retrieve(ds_ids[0])

        #Validate the data from each of the messages along the way
        self.validate_mpl_graphs_transform_results(retrieve_granule)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_multiple_workflow_instances(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Multiple_Test_Workflow',description='Tests the workflow of converting stream data')

        #Add a transformation process definition
        google_dt_procdef_id = self.create_google_dt_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=google_dt_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the first input data product
        ctd_stream_id1, ctd_parsed_data_product_id1 = self.create_ctd_input_stream_and_data_product('ctd_parsed1')
        data_product_stream_ids.append(ctd_stream_id1)

        #Create and start the first workflow
        workflow_id1, workflow_product_id1 = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id1, timeout=20)

        #Create the second input data product
        ctd_stream_id2, ctd_parsed_data_product_id2 = self.create_ctd_input_stream_and_data_product('ctd_parsed2')
        data_product_stream_ids.append(ctd_stream_id2)

        #Create and start the second workflow
        workflow_id2, workflow_product_id2 = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id2, timeout=20)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_ids,_ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 2 )


        #Start the first input stream process
        ctd_sim_pid1 = self.start_sinusoidal_input_stream_process(ctd_stream_id1)

        #Start the second input stream process
        ctd_sim_pid2 = self.start_simple_input_stream_process(ctd_stream_id2)

        #Start the output stream listener to monitor a set number of messages being sent through the workflows
        results = self.start_output_stream_and_listen(None, data_product_stream_ids, message_count_per_stream=5)

        # stop the flow of messages...
        self.process_dispatcher.cancel_process(ctd_sim_pid1) # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid2)

        #Stop the first workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id1, False)  # Should test true at some point

        #Stop the second workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id2, False)  # Should test true at some point

        workflow_ids,_ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 0 )

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )

        aid_list = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aid_list) == 0 )
Example #19
class VisStreamLauncher(ImmediateProcess):
    """
    Class emulates a stream source from a NetCDF file. It emits a record of data every few seconds on a
    stream identified by a routing key.

    """
    def on_init(self):
        log.debug("VizStreamProducer init. Self.id=%s" % self.id)

    def on_start(self):

        log.debug("VizStreamProducer start")

        self.data_source_name = self.CFG.get_safe('name',
                                                  'sine_wave_generator')
        self.dataset = self.CFG.get_safe('dataset', 'sinusoidal')

        # create a pubsub client and a resource registry client
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)

        # Dummy instrument related clients
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()


        # Additional code for creating a dummy instrument
        """
        # Set up the preconditions. Look for an existing ingestion config
        while True:
            log.info("VisStreamLauncher:on_start: Waiting for an ingestion configuration to be available.")
            ingestion_cfgs, _ = self.rrclient.find_resources(RT.IngestionConfiguration, None, None, True)

            if len(ingestion_cfgs) > 0:
                break
            else:
                gevent.sleep(1)
        """

        # Check to see if the data_product already exists in the system (e.g. when relaunching the code after a crash)
        dp_ids, _ = self.rrclient.find_resources(RT.DataProduct, None,
                                                 self.data_source_name, True)
        if len(dp_ids) > 0:
            data_product_id = dp_ids[0]
        else:
            # Create InstrumentModel
            instModel_obj = IonObject(RT.InstrumentModel,
                                      name=self.data_source_name,
                                      description=self.data_source_name)
            instModel_id = self.imsclient.create_instrument_model(
                instModel_obj)

            # Create InstrumentDevice
            instDevice_obj = IonObject(RT.InstrumentDevice,
                                       name=self.data_source_name,
                                       description=self.data_source_name,
                                       serial_number="12345")

            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instModel_id, instDevice_id)

            # create a stream definition for the data from the ctd simulator
            pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
                'ctd_parsed_param_dict', id_only=True)
            ctd_stream_def_id = self.pubsubclient.create_stream_definition(
                name="SBE37_CDM",
                description="SBE37_CDM",
                parameter_dictionary_id=pdict_id)

            tdom, sdom = time_series_domain()
            sdom = sdom.dump()
            tdom = tdom.dump()

            dp_obj = IonObject(RT.DataProduct,
                               name=self.data_source_name,
                               description='Example ctd stream',
                               temporal_domain=tdom,
                               spatial_domain=sdom)

            data_product_id = self.dpclient.create_data_product(
                dp_obj, stream_definition_id=ctd_stream_def_id)

            self.damsclient.assign_data_product(
                input_resource_id=instDevice_id,
                data_product_id=data_product_id)
            self.dpclient.activate_data_product_persistence(
                data_product_id=data_product_id)

        print ">>>>>>>>>>>>> Dataproduct for sine wave generator : ", data_product_id

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id,
                                                   PRED.hasStream, None, True)

        if self.dataset == 'sinusoidal':
            self.container.spawn_process(
                name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.sinusoidal_stream_publisher',
                cls='SinusoidalCtdPublisher',
                config={'process': {
                    'stream_id': stream_ids[0]
                }})
        else:
            self.container.spawn_process(
                name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.ctd_stream_publisher',
                cls='SimpleCtdPublisher',
                config={'process': {
                    'stream_id': stream_ids[0]
                }})
        """
        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition, name='Realtime_HighCharts', id_only=True)

        if not len(workflow_def_ids):
            helper_create_highcharts_workflow_def(self.container)
        """

    def on_quit(self):
        super(VisStreamLauncher, self).on_quit()
        log.debug("VizStreamProducer quit")
Example #20
class TestDeployment(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)


    #@unittest.skip("targeting")
    def test_create_deployment(self):

        #create a deployment with metadata and an initial site and device
        platform_site_obj = IonObject(RT.PlatformSite,
                                        name='PlatformSite1',
                                        description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site_obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device_obj)

        start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start, end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                        name='TestDeployment',
                                        description='some new deployment',
                                        constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj) )

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.omsclient.force_delete_deployment(deployment_id)
        # reading the deleted deployment object should now fail
        with self.assertRaises(NotFound):
            self.omsclient.read_deployment(deployment_id)

    #@unittest.skip("targeting")
    def test_prepare_deployment_support(self):

        deploy_sup = self.omsclient.prepare_deployment_support()
        self.assertTrue(deploy_sup)
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].type_, "AssocDeploymentInstDevice")
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].type_, "AssocDeploymentPlatDevice")
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].type_, "AssocDeploymentInstSite")
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].type_, "AssocDeploymentPlatSite")
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, [])

        #create a deployment with metadata and an initial site and device
        platform_site_obj = IonObject(RT.PlatformSite,
                                        name='PlatformSite1',
                                        description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site_obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device_obj)

        start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start, end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                        name='TestDeployment',
                                        description='some new deployment',
                                        constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1)
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1)
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, [])

        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1)
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources), 1)
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1)
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources), 1)

        #delete the deployment
        self.omsclient.force_delete_deployment(deployment_id)
        # reading the deleted deployment object should now fail
        with self.assertRaises(NotFound):
            self.omsclient.read_deployment(deployment_id)


    #@unittest.skip("targeting")
    def base_activate_deployment(self, make_assigns=False):
        # Create platform site, platform device, platform model

        bounds = GeospatialBounds(geospatial_latitude_limit_north=float(5),
                                  geospatial_latitude_limit_south=float(5),
                                  geospatial_longitude_limit_west=float(15),
                                  geospatial_longitude_limit_east=float(15),
                                  geospatial_vertical_min=float(0),
                                  geospatial_vertical_max=float(1000))

        platform_site_obj = IonObject(RT.PlatformSite,
                                        name='PlatformSite1',
                                        description='test platform site',
                                        constraint_list=[bounds])
        platform_site_id = self.omsclient.create_platform_site(platform_site_obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model_obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(platform_model_obj)

        # Create instrument site
        #-------------------------------------------------------------------------------------

        bounds = GeospatialBounds(geospatial_latitude_limit_north=float(45),
                                  geospatial_latitude_limit_south=float(40),
                                  geospatial_longitude_limit_west=float(-75),
                                  geospatial_longitude_limit_east=float(-70),
                                  geospatial_vertical_min=float(0),
                                  geospatial_vertical_max=float(500))

        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site',
                                        reference_designator='GA01SUMO-FI003-01-CTDMO0999',
                                        constraint_list=[bounds])
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        # Create an instrument device
        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                        name='InstrumentDevice1',
                                        description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        pp_obj = IonObject(OT.PlatformPort, reference_designator='GA01SUMO-FI003-01-CTDMO0999', port_type=PortTypeEnum.PAYLOAD, ip_address='1')
        port_assignments = {instrument_device_id : pp_obj}


        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        instrument_model_obj = IonObject(RT.InstrumentModel,
                                        name='InstrumentModel1',
                                        description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)

        # Create a deployment object
        #----------------------------------------------------------------------------------------------------

        start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2020, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start, end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledNodeDeploymentContext),
                                   port_assignments=port_assignments,
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        if make_assigns:
            self.imsclient.assign_platform_model_to_platform_device(platform_model_id, platform_device_id)
            self.imsclient.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)
            self.omsclient.assign_platform_model_to_platform_site(platform_model_id, platform_site_id)
            self.omsclient.assign_instrument_model_to_instrument_site(instrument_model_id, instrument_site_id)

            self.omsclient.assign_site_to_deployment(platform_site_id, deployment_id)
            self.omsclient.assign_device_to_deployment(platform_device_id, deployment_id)

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)

        return ret
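
    # Note on the fixture above: base_activate_deployment() returns a DotDict of
    # resource ids; tests call it with make_assigns=True to prewire the model,
    # site and device associations, while the *_nomodels/*_nosite/*_nodevice
    # variants below leave gaps on purpose to verify that activation fails.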

    def _create_subsequent_deployment(self, prior_dep_info):
        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice2',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                        name='InstrumentDevice2',
                                        description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        self.imsclient.assign_platform_model_to_platform_device(prior_dep_info.platform_model_id, platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(prior_dep_info.instrument_model_id, instrument_device_id)

        start = str(int(time.mktime(datetime.datetime(2013, 6, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2020, 6, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start, end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment2',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        self.omsclient.assign_site_to_deployment(prior_dep_info.platform_site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(prior_dep_info.platform_device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        ret = DotDict(instrument_device_id=instrument_device_id,
                      platform_device_id=platform_device_id,
                      deployment_id=deployment_id)

        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):

        res = self.base_activate_deployment(make_assigns=True)

        before_activate_instrument_device_obj = self.rrclient.read(res.instrument_device_id)
        self.assertNotEquals(before_activate_instrument_device_obj.lcstate, LCS.DEPLOYED)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        # OOIION-1239: retrieve the extended resource and validate that only two sites are in the list of portals
        extended_deployment = self.omsclient.get_deployment_extension(res.deployment_id)
        self.assertEquals( len(extended_deployment.computed.portals.value), 2)

        def assertGeospatialBoundsEquals(a, b):
            self.assertEquals(a['geospatial_latitude_limit_north'],b['geospatial_latitude_limit_north'])
            self.assertEquals(a['geospatial_latitude_limit_south'],b['geospatial_latitude_limit_south'])
            self.assertEquals(a['geospatial_longitude_limit_west'],b['geospatial_longitude_limit_west'])
            self.assertEquals(a['geospatial_longitude_limit_east'],b['geospatial_longitude_limit_east'])

        def assertGeospatialBoundsNotEquals(a, b):
            self.assertNotEquals(a['geospatial_latitude_limit_north'],b['geospatial_latitude_limit_north'])
            self.assertNotEquals(a['geospatial_latitude_limit_south'],b['geospatial_latitude_limit_south'])
            self.assertNotEquals(a['geospatial_longitude_limit_west'],b['geospatial_longitude_limit_west'])
            self.assertNotEquals(a['geospatial_longitude_limit_east'],b['geospatial_longitude_limit_east'])

        after_activate_instrument_device_obj = self.rrclient.read(res.instrument_device_id)
        assertGeospatialBoundsNotEquals(before_activate_instrument_device_obj.geospatial_bounds,after_activate_instrument_device_obj.geospatial_bounds)

        deployment_obj = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj.lcstate, LCS.DEPLOYED)

        extended_deployment = self.omsclient.get_deployment_extension(res.deployment_id)
        # two sites in this test
        self.assertEquals(len(extended_deployment.computed.portals.value), 2)
        # only one portal instrument
        self.assertEquals(len(extended_deployment.portal_instruments), 1)

        log.debug("deactivatin deployment, expecting success")
        self.omsclient.deactivate_deployment(res.deployment_id)

        after_deactivate_instrument_device_obj = self.rrclient.read(res.instrument_device_id)
        assertGeospatialBoundsNotEquals(after_activate_instrument_device_obj.geospatial_bounds, after_deactivate_instrument_device_obj.geospatial_bounds)

        deployment_obj = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj.lcstate, LCS.INTEGRATED)

    def test_activate_deployment_redeploy(self):
        dep_util = DeploymentUtil(self.container)
        res = self.base_activate_deployment(make_assigns=True)

        log.debug("activating first deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        deployment_obj1 = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj1.lcstate, LCS.DEPLOYED)

        next_dep_info = self._create_subsequent_deployment(res)

        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertNotEquals(deployment_obj2.lcstate, LCS.DEPLOYED)

        log.debug("activating subsequent deployment, expecting success")
        self.omsclient.activate_deployment(next_dep_info.deployment_id)

        deployment_obj1 = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj1.lcstate, LCS.INTEGRATED)

        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertEquals(deployment_obj2.lcstate, LCS.DEPLOYED)

        dep1_tc = dep_util.get_temporal_constraint(deployment_obj1)
        dep2_tc = dep_util.get_temporal_constraint(deployment_obj2)
        self.assertLessEqual(float(dep1_tc.end_datetime), float(dep2_tc.end_datetime))

        log.debug("deactivating second deployment, expecting success")
        self.omsclient.deactivate_deployment(next_dep_info.deployment_id)

        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertEquals(deployment_obj2.lcstate, LCS.INTEGRATED)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):

        res = self.base_activate_deployment()

        self.omsclient.assign_site_to_deployment(res.platform_site_id, res.deployment_id)
        self.omsclient.assign_device_to_deployment(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):

        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.omsclient.assign_device_to_deployment(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without instrument site, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest)

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.assign_site_to_deployment(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest, "No devices were found in the deployment")


    def assert_deploy_fail(self, deployment_id, err_type=BadRequest, fail_message=""):
        with self.assertRaises(err_type) as cm:
            self.omsclient.activate_deployment(deployment_id)
        # these assertions must sit outside the with block, or they are
        # skipped once activate_deployment raises
        log.debug("assert_deploy_fail cm: %s", str(cm))
        if fail_message:
            self.assertIn(fail_message, cm.exception.message)

    def test_3x3_matchups_remoteplatform(self):
        self.base_3x3_matchups(IonObject(OT.RemotePlatformDeploymentContext))

    def test_3x3_matchups_cabledinstrument(self):
        self.base_3x3_matchups(IonObject(OT.CabledInstrumentDeploymentContext))

    def test_3x3_matchups_cablednode(self):
        self.base_3x3_matchups(IonObject(OT.CabledNodeDeploymentContext))

    def base_3x3_matchups(self, deployment_context):
        """
        This will be 1 root platform, 3 sub platforms (2 of one model, 1 of another) and 3 sub instruments each (2-to-1)
        """
        deployment_context_type = type(deployment_context).__name__

        instrument_model_id  = [self.RR2.create(any_old(RT.InstrumentModel)) for _ in range(6)]
        platform_model_id    = [self.RR2.create(any_old(RT.PlatformModel)) for _ in range(3)]

        instrument_device_id = [self.RR2.create(any_old(RT.InstrumentDevice)) for _ in range(9)]
        platform_device_id   = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(4)]

        instrument_site_id   = [self.RR2.create(any_old(RT.InstrumentSite,
                                                { "reference_designator" : "GA01SUMO-FI003-0%s-CTDMO0999" % (i+1),
                                                    "planned_uplink_port":
                                                     IonObject(OT.PlatformPort,
                                                               reference_designator="GA01SUMO-FI003-0%s-CTDMO0999" % (i+1) )}))
                                for i in range(9)]

        platform_site_id     = [self.RR2.create(any_old(RT.PlatformSite,
                                                {  "reference_designator" : "GA01SUMO-FI003-0%s-CTDMO0888" % (i+1) ,
                                                    "planned_uplink_port":
                                                    IonObject(OT.PlatformPort,
                                                              reference_designator="GA01SUMO-FI003-0%s-CTDMO0888" % (i+1))}))
                                for i in range(4)]



        def instrument_model_at(platform_idx, instrument_idx):
            m = platform_idx * 2
            if instrument_idx > 0:
                m += 1
            return m

        def platform_model_at(platform_idx):
            if platform_idx > 0:
                return 1
            return 0

        def instrument_at(platform_idx, instrument_idx):
            return platform_idx * 3 + instrument_idx
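
        # Worked examples of the index math above, derived from the helpers:
        #   platform_model_at(0) == 0;  platform_model_at(1) == platform_model_at(2) == 1
        #   instrument_model_at(1, 0) == 2;  instrument_model_at(1, 1) == instrument_model_at(1, 2) == 3
        #   instrument_at(2, 1) == 7   (platform 2 carries instruments 6, 7, 8)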

        # set up the structure
        for p in range(3):
            m = platform_model_at(p)
            self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[m], platform_site_id[p])
            self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[m], platform_device_id[p])
            self.RR2.assign_platform_device_to_platform_device_with_has_device(platform_device_id[p], platform_device_id[3])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(platform_site_id[p], platform_site_id[3])

            for i in range(3):
                m = instrument_model_at(p, i)
                idx = instrument_at(p, i)
                self.RR2.assign_instrument_model_to_instrument_site_with_has_model(instrument_model_id[m], instrument_site_id[idx])
                self.RR2.assign_instrument_model_to_instrument_device_with_has_model(instrument_model_id[m], instrument_device_id[idx])
                self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id[idx], platform_device_id[p])
                self.RR2.assign_instrument_site_to_platform_site_with_has_site(instrument_site_id[idx], platform_site_id[p])

        # top level models
        self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[2], platform_device_id[3])
        self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[2], platform_site_id[3])



        # verify structure
        for p in range(3):
            parent_id = self.RR2.find_platform_device_id_by_platform_device_using_has_device(platform_device_id[p])
            self.assertEqual(platform_device_id[3], parent_id)

            parent_id = self.RR2.find_platform_site_id_by_platform_site_using_has_site(platform_site_id[p])
            self.assertEqual(platform_site_id[3], parent_id)

        for i in range(len(platform_site_id)):
            self.assertEqual(self.RR2.find_platform_model_of_platform_device_using_has_model(platform_device_id[i]),
                             self.RR2.find_platform_model_of_platform_site_using_has_model(platform_site_id[i]))

        for i in range(len(instrument_site_id)):
            self.assertEqual(self.RR2.find_instrument_model_of_instrument_device_using_has_model(instrument_device_id[i]),
                             self.RR2.find_instrument_model_of_instrument_site_using_has_model(instrument_site_id[i]))


        # OOIReferenceDesignator format: GA01SUMO-FI003-03-CTDMO0999  (site-platform_id-port-device_id)
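        # A sketch of splitting one into its fields (illustrative only, not a
        # pyon helper), assuming the four dash-separated segments shown above:
        #   site, platform, port, device = "GA01SUMO-FI003-03-CTDMO0999".split('-')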

        port_assignments = {}
        for p in range(3):
            ref_desig = "GA01SUMO-FI003-0%s-CTDMO0888" % (p+1)
            pp_obj = IonObject(OT.PlatformPort, reference_designator=ref_desig, port_type= PortTypeEnum.PAYLOAD, ip_address=str(p) )
            port_assignments[platform_device_id[p]] = pp_obj
            for i in range(3):
                ref_desig = "GA01SUMO-FI003-0%s-CTDMO0999" % ((p*3)+i+1)
                pp_obj = IonObject(OT.PlatformPort, reference_designator=ref_desig, port_type= PortTypeEnum.PAYLOAD, ip_address=str(p) )
                idx = instrument_at(p, i)
                port_assignments[instrument_device_id[idx]] = pp_obj

        deployment_id = self.RR2.create(any_old(RT.Deployment,
                {"context": deployment_context,
                 "port_assignments": port_assignments}))


        log.debug("assigning device/site to %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[3])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[3])

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_instrument_device_with_has_deployment(deployment_id, instrument_device_id[1])
            self.RR2.assign_deployment_to_instrument_site_with_has_deployment(deployment_id, instrument_site_id[1])

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[1])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[1])

        log.debug("activation of %s deployment", deployment_context_type)
        self.omsclient.activate_deployment(deployment_id)

        log.debug("validation of %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(d, self.RR2.find_platform_device_id_of_platform_site_using_has_device(platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(d, self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[i]))

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.assertEqual(instrument_device_id[1],
                             self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[1]))

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            expected_platforms = [1]

            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(i in expected_platforms,
                                 d in self.RR2.find_platform_device_ids_of_platform_site_using_has_device(platform_site_id[i]))


class TestDMEnd2End(IonIntegrationTestCase):
    def setUp(self):  # Love the non pep-8 convention
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.ingestion_management = IngestionManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.pids = []
        self.event = Event()
        self.exchange_space_name = 'test_granules'
        self.exchange_point_name = 'science_data'
        self.i = 0

        self.purge_queues()
        self.queue_buffer = []
        self.streams = []
        self.addCleanup(self.stop_all_ingestion)

    def purge_queues(self):
        xn = self.container.ex_manager.create_xn_queue(
            'science_granule_ingestion')
        xn.purge()

    def tearDown(self):
        self.purge_queues()
        for pid in self.pids:
            self.container.proc_manager.terminate_process(pid)
        IngestionManagementIntTest.clean_subscriptions()
        for queue in self.queue_buffer:
            if isinstance(queue, ExchangeNameQueue):
                queue.delete()
            elif isinstance(queue, str):
                xn = self.container.ex_manager.create_xn_queue(queue)
                xn.delete()

    #--------------------------------------------------------------------------------
    # Helper/Utility methods
    #--------------------------------------------------------------------------------

    def create_dataset(self, parameter_dict_id=''):
        '''
        Creates a time-series dataset
        '''
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
        if not parameter_dict_id:
            parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name(
                'ctd_parsed_param_dict', id_only=True)

        dataset_id = self.dataset_management.create_dataset(
            'test_dataset_%i' % self.i,
            parameter_dictionary_id=parameter_dict_id,
            spatial_domain=sdom,
            temporal_domain=tdom)
        return dataset_id

    def get_datastore(self, dataset_id):
        '''
        Gets an instance of the datastore
            This method is primarily used to defeat a bug where integration tests in multiple containers may sometimes 
            delete a CouchDB datastore and the other containers are unaware of the new state of the datastore.
        '''
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(
            datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore

    def get_ingestion_config(self):
        '''
        Grab the ingestion configuration from the resource registry
        '''
        # The ingestion configuration should have been created by the bootstrap service
        # which is configured through r2deploy.yml

        ingest_configs, _ = self.resource_registry.find_resources(
            restype=RT.IngestionConfiguration, id_only=True)
        return ingest_configs[0]

    def launch_producer(self, stream_id=''):
        '''
        Launch the producer
        '''

        pid = self.container.spawn_process(
            'better_data_producer', 'ion.processes.data.example_data_producer',
            'BetterDataProducer', {'process': {
                'stream_id': stream_id
            }})

        self.pids.append(pid)

    def make_simple_dataset(self):
        '''
        Makes a stream, a stream definition and a dataset, the essentials for most of these tests
        '''
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        stream_def_id = self.pubsub_management.create_stream_definition(
            'ctd data', parameter_dictionary_id=pdict_id)
        stream_id, route = self.pubsub_management.create_stream(
            'ctd stream %i' % self.i,
            'xp1',
            stream_definition_id=stream_def_id)

        dataset_id = self.create_dataset(pdict_id)

        self.get_datastore(dataset_id)
        self.i += 1
        return stream_id, route, stream_def_id, dataset_id

    def publish_hifi(self, stream_id, stream_route, offset=0):
        '''
        Publish deterministic data
        '''

        pub = StandaloneStreamPublisher(stream_id, stream_route)

        stream_def = self.pubsub_management.read_stream_definition(
            stream_id=stream_id)
        stream_def_id = stream_def._id
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10) + (offset * 10)
        rdt['temp'] = np.arange(10) + (offset * 10)
        pub.publish(rdt.to_granule())

    def publish_fake_data(self, stream_id, route):
        '''
        Make four granules
        '''
        for i in xrange(4):
            self.publish_hifi(stream_id, route, i)

    def start_ingestion(self, stream_id, dataset_id):
        '''
        Starts ingestion/persistence for a given dataset
        '''
        ingest_config_id = self.get_ingestion_config()
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id,
            ingestion_configuration_id=ingest_config_id,
            dataset_id=dataset_id)

    def stop_ingestion(self, stream_id):
        ingest_config_id = self.get_ingestion_config()
        self.ingestion_management.unpersist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=ingest_config_id)

    def stop_all_ingestion(self):
        try:
            for sid in self.streams:
                self.stop_ingestion(sid)
        except Exception:
            # best-effort cleanup: some streams may already be unpersisted
            pass

    def validate_granule_subscription(self, msg, route, stream_id):
        '''
        Validation for granule format
        '''
        if msg == {}:
            return
        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.info('%s', rdt.pretty_print())
        self.assertIsInstance(
            msg, Granule, 'Message is improperly formatted. (%s)' % type(msg))
        self.event.set()

    def wait_until_we_have_enough_granules(self, dataset_id='', data_size=40):
        '''
        Loops until there is a sufficient amount of data in the dataset
        '''
        done = False
        with gevent.Timeout(40):
            while not done:
                extents = self.dataset_management.dataset_extents(
                    dataset_id, 'time')[0]
                granule = self.data_retriever.retrieve_last_data_points(
                    dataset_id, 1)
                rdt = RecordDictionaryTool.load_from_granule(granule)
                if rdt['time'] and rdt['time'][0] != rdt._pdict.get_context(
                        'time').fill_value and extents >= data_size:
                    done = True
                else:
                    gevent.sleep(0.2)

    #--------------------------------------------------------------------------------
    # Test Methods
    #--------------------------------------------------------------------------------

    @attr('SMOKE')
    def test_dm_end_2_end(self):
        #--------------------------------------------------------------------------------
        # Set up a stream and have a mock instrument (producer) send data
        #--------------------------------------------------------------------------------
        self.event.clear()

        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(
            pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary', param_type=ArrayType())
        context_ids.append(
            self.dataset_management.create_parameter_context(
                'binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(
            self.dataset_management.create_parameter_context(
                'records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary(
            'replay_pdict',
            parameter_context_ids=context_ids,
            temporal_context='time')

        stream_definition = self.pubsub_management.create_stream_definition(
            'ctd data', parameter_dictionary_id=pdict_id)

        stream_id, route = self.pubsub_management.create_stream(
            'producer',
            exchange_point=self.exchange_point_name,
            stream_definition_id=stream_definition)

        #--------------------------------------------------------------------------------
        # Start persisting the data on the stream
        # - Get the ingestion configuration from the resource registry
        # - Create the dataset
        # - call persist_data_stream to setup the subscription for the ingestion workers
        #   on the stream that you specify which causes the data to be persisted
        #--------------------------------------------------------------------------------

        ingest_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id,
            ingestion_configuration_id=ingest_config_id,
            dataset_id=dataset_id)

        #--------------------------------------------------------------------------------
        # Now the granules are ingesting and persisted
        #--------------------------------------------------------------------------------

        self.launch_producer(stream_id)
        self.wait_until_we_have_enough_granules(dataset_id, 40)

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to retrieve
        #--------------------------------------------------------------------------------

        replay_data = self.data_retriever.retrieve(dataset_id)
        self.assertIsInstance(replay_data, Granule)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        self.assertTrue((rdt['time'][:10] == np.arange(10)).all(),
                        '%s' % rdt['time'][:])
        self.assertTrue((rdt['binary'][:10] == np.array(['hi'] * 10,
                                                        dtype='object')).all())

        #--------------------------------------------------------------------------------
        # Now to try the streamed approach
        #--------------------------------------------------------------------------------
        replay_stream_id, replay_route = self.pubsub_management.create_stream(
            'replay_out',
            exchange_point=self.exchange_point_name,
            stream_definition_id=stream_definition)
        self.replay_id, process_id = self.data_retriever.define_replay(
            dataset_id=dataset_id, stream_id=replay_stream_id)
        log.info('Process ID: %s', process_id)

        replay_client = ReplayClient(process_id)

        #--------------------------------------------------------------------------------
        # Create the listening endpoint for the retriever to talk to
        #--------------------------------------------------------------------------------
        xp = self.container.ex_manager.create_xp(self.exchange_point_name)
        subscriber = StandaloneStreamSubscriber(
            self.exchange_space_name, self.validate_granule_subscription)
        self.queue_buffer.append(self.exchange_space_name)
        subscriber.start()
        subscriber.xn.bind(replay_route.routing_key, xp)

        self.data_retriever.start_replay_agent(self.replay_id)

        self.assertTrue(replay_client.await_agent_ready(5),
                        'The process never launched')
        replay_client.start_replay()

        self.assertTrue(self.event.wait(10))
        subscriber.stop()

        self.data_retriever.cancel_replay_agent(self.replay_id)

        #--------------------------------------------------------------------------------
        # Test the slicing capabilities
        #--------------------------------------------------------------------------------

        granule = self.data_retriever.retrieve(dataset_id=dataset_id,
                                               query={'tdoa': slice(0, 5)})
        rdt = RecordDictionaryTool.load_from_granule(granule)
        b = rdt['time'] == np.arange(5)
        self.assertTrue(b.all() if not isinstance(b, bool) else b)
        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)

    @unittest.skip("Doesn't work")
    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_replay_pause(self):
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(
            pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary', param_type=ArrayType())
        context_ids.append(
            self.dataset_management.create_parameter_context(
                'binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(
            self.dataset_management.create_parameter_context(
                'records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary(
            'replay_pdict',
            parameter_context_ids=context_ids,
            temporal_context='time')

        stream_def_id = self.pubsub_management.create_stream_definition(
            'replay_stream', parameter_dictionary_id=pdict_id)
        replay_stream, replay_route = self.pubsub_management.create_stream(
            'replay', 'xp1', stream_definition_id=stream_def_id)
        dataset_id = self.create_dataset(pdict_id)
        scov = DatasetManagementService._get_coverage(dataset_id)

        bb = CoverageCraft(scov)
        bb.rdt['time'] = np.arange(100)
        bb.rdt['temp'] = np.random.random(100) + 30
        bb.sync_with_granule()

        DatasetManagementService._persist_coverage(
            dataset_id,
            bb.coverage)  # This invalidates it for multi-host configurations
        # Set up the subscriber to verify the data
        subscriber = StandaloneStreamSubscriber(
            self.exchange_space_name, self.validate_granule_subscription)
        xp = self.container.ex_manager.create_xp('xp1')
        self.queue_buffer.append(self.exchange_space_name)
        subscriber.start()
        subscriber.xn.bind(replay_route.routing_key, xp)

        # Set up the replay agent and the client wrapper

        # 1) Define the Replay (dataset and stream to publish on)
        self.replay_id, process_id = self.data_retriever.define_replay(
            dataset_id=dataset_id, stream_id=replay_stream)
        # 2) Make a client to interact with the process (optionally provide it a process to bind with)
        replay_client = ReplayClient(process_id)
        # 3) Start the agent (launch the process)
        self.data_retriever.start_replay_agent(self.replay_id)
        # 4) Start replaying...
        replay_client.start_replay()

        # Wait till we get some granules
        self.assertTrue(self.event.wait(5))

        # We got granules, pause the replay, clear the queue and allow the process to finish consuming
        replay_client.pause_replay()
        gevent.sleep(1)
        subscriber.xn.purge()
        self.event.clear()

        # Make sure there's no remaining messages being consumed
        self.assertFalse(self.event.wait(1))

        # Resume the replay and wait until we start getting granules again
        replay_client.resume_replay()
        self.assertTrue(self.event.wait(5))

        # Stop the replay, clear the queues
        replay_client.stop_replay()
        gevent.sleep(1)
        subscriber.xn.purge()
        self.event.clear()

        # Make sure that it did indeed stop
        self.assertFalse(self.event.wait(1))

        subscriber.stop()

    def test_retrieve_and_transform(self):
        # Make a simple dataset and start ingestion, pretty standard stuff.
        ctd_stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(ctd_stream_id, dataset_id)

        # Stream definition for the salinity data
        salinity_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        sal_stream_def_id = self.pubsub_management.create_stream_definition(
            'sal data', parameter_dictionary_id=salinity_pdict_id)

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10)
        rdt['temp'] = np.random.randn(10) * 10 + 30
        rdt['conductivity'] = np.random.randn(10) * 2 + 10
        rdt['pressure'] = np.random.randn(10) * 1 + 12

        publisher = StandaloneStreamPublisher(ctd_stream_id, route)
        publisher.publish(rdt.to_granule())

        rdt['time'] = np.arange(10, 20)

        publisher.publish(rdt.to_granule())

        self.wait_until_we_have_enough_granules(dataset_id, 20)

        granule = self.data_retriever.retrieve(
            dataset_id,
            None,
            None,
            'ion.processes.data.transforms.ctd.ctd_L2_salinity',
            'CTDL2SalinityTransformAlgorithm',
            kwargs=dict(params=sal_stream_def_id))
        rdt = RecordDictionaryTool.load_from_granule(granule)
        for i in rdt['salinity']:
            self.assertNotEquals(i, 0)
        self.streams.append(ctd_stream_id)
        self.stop_ingestion(ctd_stream_id)

    def test_last_granule(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)

        self.publish_hifi(stream_id, route, 0)
        self.publish_hifi(stream_id, route, 1)

        self.wait_until_we_have_enough_granules(dataset_id,
                                                20)  # I just need two

        success = False

        def verifier():
            replay_granule = self.data_retriever.retrieve_last_data_points(
                dataset_id, 10)

            rdt = RecordDictionaryTool.load_from_granule(replay_granule)

            comp = rdt['time'] == np.arange(10) + 10
            if not isinstance(comp, bool):
                return comp.all()
            return False

        success = poll(verifier)

        self.assertTrue(success)

        success = False

        def verify_points():
            replay_granule = self.data_retriever.retrieve_last_data_points(
                dataset_id, 5)

            rdt = RecordDictionaryTool.load_from_granule(replay_granule)

            comp = rdt['time'] == np.arange(15, 20)
            if not isinstance(comp, bool):
                return comp.all()
            return False

        success = poll(verify_points)

        self.assertTrue(success)
        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)

    def test_replay_with_parameters(self):
        #--------------------------------------------------------------------------------
        # Create the configurations and the dataset
        #--------------------------------------------------------------------------------
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(
            pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary', param_type=ArrayType())
        context_ids.append(
            self.dataset_management.create_parameter_context(
                'binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(
            self.dataset_management.create_parameter_context(
                'records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary(
            'replay_pdict',
            parameter_context_ids=context_ids,
            temporal_context='time')

        stream_def_id = self.pubsub_management.create_stream_definition(
            'replay_stream', parameter_dictionary_id=pdict_id)

        stream_id, route = self.pubsub_management.create_stream(
            'replay_with_params',
            exchange_point=self.exchange_point_name,
            stream_definition_id=stream_def_id)
        config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id,
            ingestion_configuration_id=config_id,
            dataset_id=dataset_id)

        dataset_modified = Event()

        def cb(*args, **kwargs):
            dataset_modified.set()

        es = EventSubscriber(event_type=OT.DatasetModified,
                             callback=cb,
                             origin=dataset_id)
        es.start()

        self.addCleanup(es.stop)

        self.publish_fake_data(stream_id, route)

        self.assertTrue(dataset_modified.wait(30))

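        # Note: 2208988800 s is the offset between the NTP epoch (1900) and the
        # unix epoch (1970); the bounds below are shifted by it so that times
        # 0 through 20 are selected (matching the rdt['time'] assertion below).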
        query = {
            'start_time': 0 - 2208988800,
            'end_time': 20 - 2208988800,
            'stride_time': 2,
            'parameters': ['time', 'temp']
        }
        retrieved_data = self.data_retriever.retrieve(dataset_id=dataset_id,
                                                      query=query)

        rdt = RecordDictionaryTool.load_from_granule(retrieved_data)
        comp = np.arange(0, 20, 2) == rdt['time']
        self.assertTrue(comp.all(), '%s' % rdt.pretty_print())
        self.assertEquals(set(rdt.iterkeys()), set(['time', 'temp']))

        extents = self.dataset_management.dataset_extents(
            dataset_id=dataset_id, parameters=['time', 'temp'])
        self.assertTrue(extents['time'] >= 20)
        self.assertTrue(extents['temp'] >= 20)

        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)

    def test_repersist_data(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        self.publish_hifi(stream_id, route, 0)
        self.publish_hifi(stream_id, route, 1)
        self.wait_until_we_have_enough_granules(dataset_id, 20)
        config_id = self.get_ingestion_config()
        self.ingestion_management.unpersist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=config_id)
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id,
            ingestion_configuration_id=config_id,
            dataset_id=dataset_id)
        self.publish_hifi(stream_id, route, 2)
        self.publish_hifi(stream_id, route, 3)
        self.wait_until_we_have_enough_granules(dataset_id, 40)
        success = False
        with gevent.timeout.Timeout(5):
            while not success:

                replay_granule = self.data_retriever.retrieve(dataset_id)

                rdt = RecordDictionaryTool.load_from_granule(replay_granule)

                comp = rdt['time'] == np.arange(0, 40)
                if not isinstance(comp, bool):
                    success = comp.all()
                gevent.sleep(1)

        self.assertTrue(success)
        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Host requires file-system access to coverage files; CEI mode does not support this.')
    def test_correct_time(self):

        # There are 2208988800 seconds between Jan 1 1900 and Jan 1 1970, i.e.
        #  the conversion factor between unix and NTP time
        unix_now = np.floor(time.time())
        ntp_now = unix_now + 2208988800

        unix_ago = unix_now - 20
        ntp_ago = unix_ago + 2208988800
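        # e.g. unix time 0 (1970-01-01T00:00Z) is NTP time 2208988800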

        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        coverage = DatasetManagementService._get_coverage(dataset_id)
        coverage.insert_timesteps(20)
        coverage.set_parameter_values('time', np.arange(ntp_ago, ntp_now))

        temporal_bounds = self.dataset_management.dataset_temporal_bounds(
            dataset_id)

        self.assertTrue(np.abs(temporal_bounds[0] - unix_ago) < 2)
        self.assertTrue(np.abs(temporal_bounds[1] - unix_now) < 2)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Host requires file-system access to coverage files; CEI mode does not support this.')
    def test_empty_coverage_time(self):

        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        coverage = DatasetManagementService._get_coverage(dataset_id)
        temporal_bounds = self.dataset_management.dataset_temporal_bounds(
            dataset_id)
        self.assertEquals([coverage.get_parameter_context('time').fill_value] *
                          2, temporal_bounds)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Host requires file-system access to coverage files; CEI mode does not support this.')
    def test_out_of_band_retrieve(self):
        # Set up the environment
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)

        # Fill the dataset
        self.publish_fake_data(stream_id, route)
        self.wait_until_we_have_enough_granules(dataset_id, 40)

        # Retrieve the data
        granule = DataRetrieverService.retrieve_oob(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        self.assertTrue((rdt['time'] == np.arange(40)).all())

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Host requires file-system access to coverage files; CEI mode does not support this.')
    def test_retrieve_cache(self):
        DataRetrieverService._refresh_interval = 1
        datasets = [self.make_simple_dataset() for _ in xrange(10)]
        for stream_id, route, stream_def_id, dataset_id in datasets:
            coverage = DatasetManagementService._get_coverage(dataset_id)
            coverage.insert_timesteps(10)
            coverage.set_parameter_values('time', np.arange(10))
            coverage.set_parameter_values('temp', np.arange(10))

        # Verify cache hit and refresh
        dataset_ids = [i[3] for i in datasets]
        self.assertTrue(
            dataset_ids[0] not in DataRetrieverService._retrieve_cache)
        DataRetrieverService._get_coverage(dataset_ids[0])  # Hit the cache
        cov, age = DataRetrieverService._retrieve_cache[dataset_ids[0]]
        # Verify that it was hit and it's now in there
        self.assertTrue(dataset_ids[0] in DataRetrieverService._retrieve_cache)

        gevent.sleep(DataRetrieverService._refresh_interval + 0.2)

        DataRetrieverService._get_coverage(dataset_ids[0])  # Hit the cache
        cov, age2 = DataRetrieverService._retrieve_cache[dataset_ids[0]]
        self.assertTrue(age2 != age)

        for dataset_id in dataset_ids:
            DataRetrieverService._get_coverage(dataset_id)

        self.assertTrue(
            dataset_ids[0] not in DataRetrieverService._retrieve_cache)

        stream_id, route, stream_def, dataset_id = datasets[0]
        self.start_ingestion(stream_id, dataset_id)
        DataRetrieverService._get_coverage(dataset_id)

        self.assertTrue(dataset_id in DataRetrieverService._retrieve_cache)

        DataRetrieverService._refresh_interval = 100
        self.publish_hifi(stream_id, route, 1)
        self.wait_until_we_have_enough_granules(dataset_id, data_size=20)

        event = gevent.event.Event()
        with gevent.Timeout(20):
            while not event.wait(0.1):
                if dataset_id not in DataRetrieverService._retrieve_cache:
                    event.set()

        self.assertTrue(event.is_set())

    @unittest.skip('Outdated due to ingestion retry')
    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Host requires file-system access to coverage files; CEI mode does not support this.')
    def test_ingestion_failover(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)

        event = Event()

        def cb(*args, **kwargs):
            event.set()

        sub = EventSubscriber(event_type="ExceptionEvent",
                              callback=cb,
                              origin="stream_exception")
        sub.start()

        self.publish_fake_data(stream_id, route)
        self.wait_until_we_have_enough_granules(dataset_id, 40)

        file_path = DatasetManagementService._get_coverage_path(dataset_id)
        master_file = os.path.join(file_path, '%s_master.hdf5' % dataset_id)

        with open(master_file, 'w') as f:
            f.write('this will crash HDF')

        self.publish_hifi(stream_id, route, 5)

        self.assertTrue(event.wait(10))

        sub.stop()


class GenericIntHelperTestCase(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.generic_int_helper_rr  = ResourceRegistryServiceClient(node=self.container.node)
        self.generic_int_helper_rr2 = EnhancedResourceRegistryClient(self.generic_int_helper_rr)


    def assert_lcs_fail(self,
                        owner_service,
                        resource_label,
                        resource_id,
                        lc_event):
        """
        execute an lcs event and verify that it fails

        @param owner_service instance of service client that will handle the request
        @param resource_label string like "instrument_device"
        @param resource_id string
        @param lc_event string like LCE.INTEGRATE
        """

        lcsmethod = getattr(owner_service, "execute_%s_lifecycle" % resource_label)
        # disabled for beta testing; drop the early return to re-enable the check
        log.warn("Skipping generic_lcs_fail for beta testing purposes")
        return
        self.assertRaises(Unauthorized, lcsmethod, resource_id, lc_event)
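
    # Hypothetical usage (a sketch; the client and id are assumptions):
    #   self.assert_lcs_fail(self.imsclient, "instrument_device",
    #                        instrument_device_id, LCE.INTEGRATE)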


    def assert_lcs_pass(self,
                        owner_service,
                        resource_label,
                        resource_id,
                        lc_event,
                        lc_state):
        """
        execute an lcs event and verify that it passes and affects state

        @param owner_service instance of service client that will handle the request
        @param resource_label string like "instrument_device"
        @param resource_id string
        @param lc_event string like LCE.INTEGRATE
        @param lc_state string like LCS.INTEGRATED (where the state should end up)
        """

        lcsmethod  = getattr(owner_service, "execute_%s_lifecycle" % resource_label)
        readmethod = getattr(owner_service, "read_%s" % resource_label)

        lcsmethod(resource_id, lc_event)
        resource_obj = readmethod(resource_id)

        self.assertEqual(lc_state, resource_obj.lcstate)
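
    # Hypothetical usage (a sketch; the client and id are assumptions):
    #   self.assert_lcs_pass(self.imsclient, "instrument_device",
    #                        instrument_device_id, LCE.INTEGRATE, LCS.INTEGRATED)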




    def perform_association_script(self,
                                   assign_obj_to_subj_fn,
                                   find_subj_fn,
                                   find_obj_fn,
                                   subj_id,
                                   obj_id):
        """
        create an association and test that it went properly

        @param assign_obj_to_subj_fn the service method that takes (obj, subj) and associates them
        @param find_subj_fn the service method that returns a list of subjects given an object
        @param find_obj_fn the service method that returns a list of objects given a subject
        @param subj_id the subject id to associate
        @param obj_id the object id to associate
        """
        initial_subj_count = len(find_subj_fn(obj_id))
        initial_obj_count  = len(find_obj_fn(subj_id))

        log.debug("Creating association")
        if not ("str" == type(subj_id).__name__ == type(obj_id).__name__):
            raise NotImplementedError("%s='%s' to %s='%s'" %
                                      (type(subj_id), str(subj_id), type(obj_id), str(obj_id)))
        if not (subj_id and obj_id):
            raise NotImplementedError("%s='%s' to %s='%s'" %
                                      (type(subj_id), str(subj_id), type(obj_id), str(obj_id)))
        assign_obj_to_subj_fn(obj_id, subj_id)

        log.debug("Verifying find-subj-by-obj")
        subjects = find_subj_fn(obj_id)
        self.assertEqual(initial_subj_count + 1, len(subjects))
        subject_ids = []
        for x in subjects:
            if not "_id" in x:
                raise Inconsistent("'_id' field not found in resource! got: %s" % str(x))
            subject_ids.append(x._id)
        self.assertIn(subj_id, subject_ids)

        log.debug("Verifying find-obj-by-subj")
        objects = find_obj_fn(subj_id)
        self.assertEqual(initial_obj_count + 1, len(objects))
        object_ids = []
        for x in objects:
            if not "_id" in x:
                raise Inconsistent("'_id' field not found in resource! got: %s" % str(x))
            object_ids.append(x._id)
        self.assertIn(obj_id, object_ids)
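
    # Hypothetical usage (a sketch): the assign/find helpers named here follow
    # the RR2 naming convention used elsewhere in this file, but are assumptions.
    #   self.perform_association_script(
    #       self.generic_int_helper_rr2.assign_instrument_model_to_instrument_device_with_has_model,
    #       self.generic_int_helper_rr2.find_instrument_devices_by_instrument_model_using_has_model,
    #       self.generic_int_helper_rr2.find_instrument_models_of_instrument_device_using_has_model,
    #       instrument_device_id, instrument_model_id)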



    def perform_fd_script(self, resource_id, resource_label, owner_service):
        """
        delete a resource and check that it was properly deleted

        @param resource_id id to be deleted
        @param resource_label something like platform_model
        @param owner_service service client instance
        """

        del_op = getattr(owner_service, "force_delete_%s" % resource_label)

        del_op(resource_id)

        # try again to make sure that we get NotFound
        self.assertRaises(NotFound, del_op, resource_id)


    def perform_fcruf_script(self, resource_iontype, resource_label, owner_service, actual_obj=None, extra_fn=None):
        """
        run through find, create, read, update, and find ops on a basic resource

        NO DELETE in here.

        @param resource_iontype something like RT.PlatformModel
        @param resource_label something like platform_model
        @param owner_service a service client instance like InstrumentManagementServiceClient
        @param actual_obj use this IonObject instead of a generic object for testing
        @param extra_fn a function to run after the script, taking the new resource id as input
        @return generic_id, the resource_id of the generic resource that was created
        """



        # this section is just to make the LCA integration script easier to write.
        #
        # each resource type gets put through (essentially) the same steps.
        #
        # so, we just set up a generic service-esque object.
        # (basically just a nice package of shortcuts):
        #  create a fake service object and populate it with the methods we need

        some_service = DotDict()

        def fill(svc, method):
            """
            make a "shortcut service" for testing crud ops.
            @param svc a dotdict
            @param method the method name to add
            """

            realmethod = "%s_widget" % method

            setattr(svc, realmethod,
                    getattr(owner_service, "%s_%s" % (method, resource_label)))



        fill(some_service, "create")
        fill(some_service, "read")
        fill(some_service, "update")
        fill(some_service, "delete")

        def find_widgets():
            ret, _ = self.generic_int_helper_rr.find_resources(resource_iontype, None, None, False)
            return ret

        #UX team: generic script for LCA resource operations begins here.
        # some_service will be replaced with whatever service you're calling
        # widget will be replaced with whatever resource you're working with
        # resource_label will be data_product or logical_instrument


        log.info("Finding %s objects", resource_label)
        num_objs = len(find_widgets())
        log.info("I found %d %s objects", num_objs, resource_label)

        generic_obj = actual_obj or any_old(resource_iontype)
        log.info("Creating a %s with name='%s'", resource_label, generic_obj.name)
        generic_id = some_service.create_widget(generic_obj)
        self.assertIsNotNone(generic_id, "%s failed its creation" % resource_iontype)

        log.info("Reading %s '%s'", resource_label, generic_id)
        generic_ret = some_service.read_widget(generic_id)

        log.info("Verifying equality of stored and retrieved object")
        self.assertEqual(generic_obj.name, generic_ret.name)
        self.assertEqual(generic_obj.description, generic_ret.description)

        log.info("Updating %s '%s'", resource_label, generic_id)
        generic_newname = "%s updated" % generic_ret.name
        generic_ret.name = generic_newname
        some_service.update_widget(generic_ret)

        log.info("Reading %s '%s' to verify update", resource_iontype, generic_id)
        generic_ret = some_service.read_widget(generic_id)

        self.assertEqual(generic_newname, generic_ret.name)
        self.assertEqual(generic_obj.description, generic_ret.description)

        log.info("Finding %s objects... checking that there's a new one", resource_iontype)
        num_objs2 = len(find_widgets())

        log.info("There were %s and now there are %s", num_objs, num_objs2)
        self.assertTrue(num_objs2 > num_objs)

        if extra_fn is not None:
            log.info("Running extra_fn on generic resource '%s'", generic_id)
            extra_fn(generic_id)

        log.info("Returning %s with id '%s'", resource_iontype, generic_id)
        return generic_id
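
    # Hypothetical end-to-end usage (a sketch, reusing the docstring's example
    # names; the service client is an assumption):
    #   model_id = self.perform_fcruf_script(RT.PlatformModel, "platform_model",
    #                                        self.imsclient)
    #   self.perform_fd_script(model_id, "platform_model", self.imsclient)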


class TestWorkflowManagementIntegration(IonIntegrationTestCase):
    def setUp(self):
        # Start container

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.workflowclient = WorkflowManagementServiceClient(
            node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient(
            node=self.container.node)

        self.ctd_stream_def = SBE37_CDM_stream_definition()

    def _create_ctd_input_stream_and_data_product(
            self, data_product_name='ctd_parsed'):

        cc = self.container
        assertions = self.assertTrue

        #-------------------------------
        # Create CTD Parsed as the initial data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(
            container=self.ctd_stream_def, name='Simulated CTD data')

        log.debug('Creating new CDM data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
                           name=data_product_name,
                           description='ctd stream test')
        try:
            ctd_parsed_data_product_id = self.dataproductclient.create_data_product(
                dp_obj, ctd_stream_def_id)
        except Exception as ex:
            self.fail("failed to create new data product: %s" % ex)

        log.debug('new ctd_parsed_data_product_id = %s',
                  ctd_parsed_data_product_id)

        #Only ever need one device for testing purposes.
        instDevice_obj, _ = self.rrclient.find_resources(
            restype=RT.InstrumentDevice, name='SBE37IMDevice')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice,
                                       name='SBE37IMDevice',
                                       description="SBE37IMDevice",
                                       serial_number="12345")
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_parsed_data_product_id)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_parsed_data_product_id,
            persist_data=False,
            persist_metadata=False)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_id,
                                                   PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        ctd_stream_id = stream_ids[0]

        return ctd_stream_id, ctd_parsed_data_product_id

    def _start_simple_input_stream_process(self, ctd_stream_id):
        return self._start_input_stream_process(ctd_stream_id)

    def _start_sinusoidal_input_stream_process(self, ctd_stream_id):
        return self._start_input_stream_process(
            ctd_stream_id, 'ion.processes.data.sinusoidal_stream_publisher',
            'SinusoidalCtdPublisher')

    def _start_input_stream_process(
            self,
            ctd_stream_id,
            module='ion.processes.data.ctd_stream_publisher',
            class_name='SimpleCtdPublisher'):

        ###
        ### Start the process for producing the CTD data
        ###
        # process definition for the ctd simulator...
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module': module,
            'class': class_name
        }

        ctd_sim_procdef_id = self.process_dispatcher.create_process_definition(
            process_definition=producer_definition)

        # Start the ctd simulator to produce some data
        configuration = {
            'process': {
                'stream_id': ctd_stream_id,
            }
        }
        ctd_sim_pid = self.process_dispatcher.schedule_process(
            process_definition_id=ctd_sim_procdef_id,
            configuration=configuration)

        return ctd_sim_pid

    def _start_output_stream_listener(self,
                                      data_product_stream_ids,
                                      message_count_per_stream=10):

        cc = self.container
        assertions = self.assertTrue

        ###
        ### Make a subscriber in the test to listen for transformed data
        ###
        salinity_subscription_id = self.pubsubclient.create_subscription(
            query=StreamQuery(data_product_stream_ids),
            exchange_name='workflow_test',
            name="test workflow transformations",
        )

        pid = cc.spawn_process(name='dummy_process_for_test',
                               module='pyon.ion.process',
                               cls='SimpleProcess',
                               config={})
        dummy_process = cc.proc_manager.procs[pid]

        subscriber_registrar = StreamSubscriberRegistrar(process=dummy_process,
                                                         node=cc.node)

        result = gevent.event.AsyncResult()
        results = []

        def message_received(message, headers):
            log.warn('data received!')
            results.append(message)
            # only wait for so many messages, per stream
            if len(results) >= len(data_product_stream_ids) * message_count_per_stream:
                result.set(True)

        subscriber = subscriber_registrar.create_subscriber(
            exchange_name='workflow_test', callback=message_received)
        subscriber.start()

        # after the queue has been created it is safe to activate the subscription
        self.pubsubclient.activate_subscription(
            subscription_id=salinity_subscription_id)

        # Assert that we have received data
        assertions(result.get(timeout=30))

        self.pubsubclient.deactivate_subscription(
            subscription_id=salinity_subscription_id)

        subscriber.stop()

        return results

    def _validate_messages(self, results):

        cc = self.container
        assertions = self.assertTrue

        first_salinity_values = None

        for message in results:

            try:
                psd = PointSupplementStreamParser(
                    stream_definition=self.ctd_stream_def,
                    stream_granule=message)
                temp = psd.get_values('temperature')
                log.info(psd.list_field_names())
            except KeyError:
                temp = None

            if temp is not None:
                assertions(isinstance(temp, numpy.ndarray))

                log.info('temperature=' + str(numpy.nanmin(temp)))

                first_salinity_values = None

            else:
                psd = PointSupplementStreamParser(
                    stream_definition=SalinityTransform.outgoing_stream_def,
                    stream_granule=message)
                log.info(psd.list_field_names())

                # Test the handy info method for the names of fields in the stream def
                assertions('salinity' in psd.list_field_names())

                # you have to know the name of the coverage in stream def
                salinity = psd.get_values('salinity')
                log.info('salinity=' + str(numpy.nanmin(salinity)))

                assertions(isinstance(salinity, numpy.ndarray))

                assertions(numpy.nanmin(salinity) >
                           0.0)  # salinity should always be greater than 0

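                # the first salinity granule seen is taken as the
                # SalinityTransform output; the next is expected to be the
                # doubled stream, each value exactly twice its counterpart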
                if first_salinity_values is None:
                    first_salinity_values = salinity.tolist()
                else:
                    second_salinity_values = salinity.tolist()
                    assertions(
                        len(first_salinity_values) == len(
                            second_salinity_values))
                    for idx in range(0, len(first_salinity_values)):
                        assertions(first_salinity_values[idx] *
                                   2.0 == second_salinity_values[idx])

    def _create_salinity_data_process_definition(self):

        # Salinity: Data Process Definition

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='ctd_salinity')
        if len(dpd) > 0:
            return dpd[0]

        log.debug("Create data process definition SalinityTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_salinity',
            description='create a salinity data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
            class_name='SalinityTransform',
            process_source='SalinityTransform source code here...')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new SalinityTransform data process definition: %s"
                % ex)

        # create a stream definition for the data from the salinity Transform
        sal_stream_def_id = self.pubsubclient.create_stream_definition(
            container=SalinityTransform.outgoing_stream_def, name='Salinity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            sal_stream_def_id, ctd_L2_salinity_dprocdef_id)

        return ctd_L2_salinity_dprocdef_id

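    # the same find-or-create pattern is used for each data process
    # definition below: look it up by name in the resource registry, else
    # create it and attach an outgoing stream definition
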
    def _create_salinity_doubler_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='salinity_doubler')
        if len(dpd) > 0:
            return dpd[0]

        # Salinity Doubler: Data Process Definition
        log.debug("Create data process definition SalinityDoublerTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='salinity_doubler',
            description='create a salinity doubler data product',
            module='ion.processes.data.transforms.example_double_salinity',
            class_name='SalinityDoubler',
            process_source='SalinityDoubler source code here...')
        try:
            salinity_doubler_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new SalinityDoubler data process definition: %s"
                % ex)

        # create a stream definition for the data from the salinity Transform
        salinity_double_stream_def_id = self.pubsubclient.create_stream_definition(
            container=SalinityDoubler.outgoing_stream_def,
            name='SalinityDoubler')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            salinity_double_stream_def_id, salinity_doubler_dprocdef_id)

        return salinity_doubler_dprocdef_id

    def create_transform_process(self, data_process_definition_id,
                                 data_process_input_dp_id):

        data_process_definition = self.rrclient.read(
            data_process_definition_id)

        # Find the link between the output Stream Definition resource and the Data Process Definition resource
        stream_ids, _ = self.rrclient.find_objects(data_process_definition._id,
                                                   PRED.hasStreamDefinition,
                                                   RT.StreamDefinition,
                                                   id_only=True)
        if not stream_ids:
            raise Inconsistent(
                "The data process definition %s is missing an association to an output stream definition"
                % data_process_definition._id)
        process_output_stream_def_id = stream_ids[0]

        #Concatenate the name of the workflow and data process definition for the name of the data product output
        data_process_name = data_process_definition.name

        # Create the output data product of the transform
        transform_dp_obj = IonObject(
            RT.DataProduct,
            name=data_process_name,
            description=data_process_definition.description)
        transform_dp_id = self.dataproductclient.create_data_product(
            transform_dp_obj, process_output_stream_def_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=transform_dp_id,
            persist_data=True,
            persist_metadata=True)

        # the transform's data product is the output product of this step
        output_data_product_id = transform_dp_id

        # Create the transform data process
        log.debug("create data_process and start it")
        data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition._id, [data_process_input_dp_id],
            {'output': transform_dp_id})
        self.dataprocessclient.activate_data_process(data_process_id)

        #Find the id of the output data stream
        stream_ids, _ = self.rrclient.find_objects(transform_dp_id,
                                                   PRED.hasStream, None, True)
        if not stream_ids:
            raise Inconsistent(
                "The data process %s is missing an association to an output stream"
                % data_process_id)

        return data_process_id, output_data_product_id

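    # Usage sketch (names are illustrative; mirrors test_SA_transform_components
    # below): given a data process definition id and an input data product id,
    #   proc_id, out_dp_id = self.create_transform_process(dprocdef_id, in_dp_id)
    # the returned output product id can then be walked for its output stream.
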
    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_SA_transform_components(self):

        assertions = self.assertTrue

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product(
        )
        data_product_stream_ids.append(ctd_stream_id)

        ###
        ###  Setup the first transformation
        ###

        # Salinity: Data Process Definition
        ctd_L2_salinity_dprocdef_id = self._create_salinity_data_process_definition(
        )

        l2_salinity_all_data_process_id, ctd_l2_salinity_output_dp_id = self.create_transform_process(
            ctd_L2_salinity_dprocdef_id, ctd_parsed_data_product_id)

        ## get the stream id for the transform outputs
        stream_ids, _ = self.rrclient.find_objects(
            ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        sal_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_stream_id)

        ###
        ###  Setup the second transformation
        ###

        # Salinity Doubler: Data Process Definition
        salinity_doubler_dprocdef_id = self._create_salinity_doubler_data_process_definition(
        )

        salinity_double_data_process_id, salinity_doubler_output_dp_id = self.create_transform_process(
            salinity_doubler_dprocdef_id, ctd_l2_salinity_output_dp_id)

        stream_ids, _ = self.rrclient.find_objects(
            salinity_doubler_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        sal_dbl_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_dbl_stream_id)

        #Start the input stream process
        ctd_sim_pid = self._start_simple_input_stream_process(ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the transform processes
        self.dataprocessclient.deactivate_data_process(
            salinity_double_data_process_id)
        self.dataprocessclient.deactivate_data_process(
            l2_salinity_all_data_process_id)

        # stop the flow and parse the messages...
        self.process_dispatcher.cancel_process(
            ctd_sim_pid
        )  # kill the ctd simulator process - that is enough data

        #Validate the data from each of the messages along the way
        self._validate_messages(results)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='Salinity_Test_Workflow',
            description='tests a workflow of multiple transform data processes'
        )

        workflow_data_product_name = 'TEST-Workflow_Output_Product'  #Set a specific output product name

        #Add a transformation process definition
        ctd_L2_salinity_dprocdef_id = self._create_salinity_data_process_definition(
        )
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=ctd_L2_salinity_dprocdef_id,
            persist_process_output_data=False
        )  #Don't persist the intermediate data product
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Add a transformation process definition
        salinity_doubler_dprocdef_id = self._create_salinity_doubler_data_process_definition(
        )
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=salinity_doubler_dprocdef_id,
            output_data_product_name=workflow_data_product_name)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

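        # create_workflow_definition should have associated both step
        # definitions with the new workflow definition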
        aids = self.rrclient.find_associations(workflow_def_id,
                                               PRED.hasDataProcessDefinition)
        assertions(len(aids) == 2)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product(
        )
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id, timeout=30)

        workflow_output_ids, _ = self.rrclient.find_subjects(
            RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        #Verify the output data product name matches what was specified in the workflow definition
        workflow_product = self.rrclient.read(workflow_product_id)
        assertions(workflow_product.name == workflow_data_product_name)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids, _ = self.rrclient.find_objects(
            workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 2)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                       None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        #Start the input stream process
        ctd_sim_pid = self._start_simple_input_stream_process(ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id, False, timeout=15)  # Should test true at some point

        #Make sure the Workflow object was removed
        objs, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(objs) == 0)

        # stop the flow and parse the messages...
        self.process_dispatcher.cancel_process(
            ctd_sim_pid
        )  # kill the ctd simulator process - that is enough data

        #Validate the data from each of the messages along the way
        self._validate_messages(results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids, _ = self.rrclient.find_resources(
            restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0)

    def _create_google_dt_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='google_dt_transform')
        if len(dpd) > 0:
            return dpd[0]

        # Data Process Definition
        log.debug("Create data process definition GoogleDtTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='google_dt_transform',
            description='Convert data streams to Google DataTables',
            module='ion.processes.data.transforms.viz.google_dt',
            class_name='VizTransformGoogleDT',
            process_source='VizTransformGoogleDT source code here...')
        try:
            procdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new VizTransformGoogleDT data process definition: %s"
                % ex)

        # create a stream definition for the data from the
        stream_def_id = self.pubsubclient.create_stream_definition(
            container=VizTransformGoogleDT.outgoing_stream_def,
            name='VizTransformGoogleDT')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            stream_def_id, procdef_id)

        return procdef_id

    def _validate_google_dt_results(self, results_stream_def, results):

        cc = self.container
        assertions = self.assertTrue

        for g in results:

            if isinstance(g, Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)
                #log.warn(tx.pretty_print())
                #log.warn(rdt.pretty_print())

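                # each granule carries a single google_dt_components record:
                # a product-type tag, a column description list (first column
                # is time), and the table content rows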
                gdt_component = rdt['google_dt_components'][0]

                assertions(
                    gdt_component['viz_product_type'] == 'google_realtime_dt')
                gdt_description = gdt_component['data_table_description']
                gdt_content = gdt_component['data_table_content']

                assertions(gdt_description[0][0] == 'time')
                assertions(len(gdt_description) > 1)
                assertions(len(gdt_content) >= 0)

        return

    def _create_mpl_graphs_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='mpl_graphs_transform')
        if len(dpd) > 0:
            return dpd[0]

        #Data Process Definition
        log.debug("Create data process definition MatplotlibGraphsTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='mpl_graphs_transform',
            description='Convert data streams to Matplotlib graphs',
            module='ion.processes.data.transforms.viz.matplotlib_graphs',
            class_name='VizTransformMatplotlibGraphs',
            process_source='VizTransformMatplotlibGraphs source code here...')
        try:
            procdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new VizTransformMatplotlibGraphs data process definition: %s"
                % ex)

        # create a stream definition for the data
        stream_def_id = self.pubsubclient.create_stream_definition(
            container=VizTransformMatplotlibGraphs.outgoing_stream_def,
            name='VizTransformMatplotlibGraphs')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            stream_def_id, procdef_id)

        return procdef_id

    def _validate_mpl_graphs_results(self, results_stream_def, results):

        cc = self.container
        assertions = self.assertTrue

        for g in results:
            if isinstance(g, Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)
                #log.warn(tx.pretty_print())
                #log.warn(rdt.pretty_print())

                graphs = rdt['matplotlib_graphs']

                for graph in graphs:
                    assertions(
                        graph['viz_product_type'] == 'matplotlib_graphs')
                    # check to see if the list (numpy array) contains actual images
                    assertions(
                        imghdr.what(graph['image_name'], graph['image_obj']) ==
                        'png')

        return

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_google_dt_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='GoogleDT_Test_Workflow',
            description=
            'Tests the workflow of converting stream data to Google DT')

        #Add a transformation process definition
        google_dt_procdef_id = self._create_google_dt_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=google_dt_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product(
        )
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id)

        workflow_output_ids, _ = self.rrclient.find_subjects(
            RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids, _ = self.rrclient.find_objects(
            workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                       None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        #Start the input stream process
        ctd_sim_pid = self._start_simple_input_stream_process(ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id, False)  # Should test true at some point

        # stop the flow and parse the messages...
        self.process_dispatcher.cancel_process(
            ctd_sim_pid
        )  # kill the ctd simulator process - that is enough data

        #Validate the data from each of the messages along the way
        self._validate_google_dt_results(
            VizTransformGoogleDT.outgoing_stream_def, results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids, _ = self.rrclient.find_resources(
            restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_mpl_graphs_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='Mpl_Graphs_Test_Workflow',
            description=
            'Tests the workflow of converting stream data to Matplotlib graphs'
        )

        #Add a transformation process definition
        mpl_graphs_procdef_id = self._create_mpl_graphs_data_process_definition(
        )
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=mpl_graphs_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product(
        )
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id)

        workflow_output_ids, _ = self.rrclient.find_subjects(
            RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids, _ = self.rrclient.find_objects(
            workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                       None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        #Start the input stream process
        ctd_sim_pid = self._start_sinusoidal_input_stream_process(
            ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id, False)  # Should test true at some point

        # stop the flow and parse the messages...
        self.process_dispatcher.cancel_process(
            ctd_sim_pid
        )  # kill the ctd simulator process - that is enough data

        #Validate the data from each of the messages along the way
        self._validate_mpl_graphs_results(
            VizTransformMatplotlibGraphs.outgoing_stream_def, results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids, _ = self.rrclient.find_resources(
            restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_multiple_workflow_instances(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='Multiple_Test_Workflow',
            description='Tests the workflow of converting stream data')

        #Add a transformation process definition
        google_dt_procdef_id = self._create_google_dt_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=google_dt_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the first input data product
        ctd_stream_id1, ctd_parsed_data_product_id1 = self._create_ctd_input_stream_and_data_product(
            'ctd_parsed1')
        data_product_stream_ids.append(ctd_stream_id1)

        #Create and start the first workflow
        workflow_id1, workflow_product_id1 = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id1)

        #Create the second input data product
        ctd_stream_id2, ctd_parsed_data_product_id2 = self._create_ctd_input_stream_and_data_product(
            'ctd_parsed2')
        data_product_stream_ids.append(ctd_stream_id2)

        #Create and start the second workflow
        workflow_id2, workflow_product_id2 = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id2)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_ids, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 2)

        #Start the first input stream process
        ctd_sim_pid1 = self._start_sinusoidal_input_stream_process(
            ctd_stream_id1)

        #Start the second input stream process
        ctd_sim_pid2 = self._start_simple_input_stream_process(ctd_stream_id2)

        #Start the output stream listener to monitor a set number of messages being sent through the workflows
        results = self._start_output_stream_listener(
            data_product_stream_ids, message_count_per_stream=5)

        # stop the flow of messages...
        self.process_dispatcher.cancel_process(
            ctd_sim_pid1
        )  # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid2)

        #Stop the first workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id1, False)  # Should test true at some point

        #Stop the second workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id2, False)  # Should test true at some point

        workflow_ids, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 0)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids, _ = self.rrclient.find_resources(
            restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0)

        aid_list = self.rrclient.find_associations(
            workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aid_list) == 0)
Example #24
class TestDeployment(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        # create missing data process definition
        self.dsmsclient = DataProcessManagementServiceClient(
            node=self.container.node)
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
            description="normally in preload",
            module='ion.processes.data.transforms.logical_transform',
            class_name='logical_transform')
        self.dsmsclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(
                    RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)

        self.addCleanup(killAllDataProcesses)

    #@unittest.skip("targeting")
    def test_create_deployment(self):

        #create a deployment with metadata and an initial site and device
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='PlatformSite1',
                                      description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site_obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device_obj)

        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ",
                  str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite,
                                                  PRED.hasDeployment,
                                                  deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice,
                                                    PRED.hasDeployment,
                                                    deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def base_activate_deployment(self):

        #-------------------------------------------------------------------------------------
        # Create platform site, platform device, platform model
        #-------------------------------------------------------------------------------------

        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='PlatformSite1',
                                      description='test platform site')
        platform_site_id = self.omsclient.create_platform_site(
            platform_site_obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(
            platform_device_obj)

        platform_model_obj = IonObject(RT.PlatformModel,
                                       name='PlatformModel1',
                                       description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(
            platform_model_obj)

        #-------------------------------------------------------------------------------------
        # Create instrument site
        #-------------------------------------------------------------------------------------

        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(
            instrument_site_obj, platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(
            name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='Log Data Product',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        out_log_data_product_id = self.dmpsclient.create_data_product(
            dp_obj, ctd_stream_def_id)

        #----------------------------------------------------------------------------------------------------
        # Start the transform (a logical transform) that acts as an instrument site
        #----------------------------------------------------------------------------------------------------

        self.omsclient.create_site_data_product(
            site_id=instrument_site_id,
            data_product_id=out_log_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument device
        #----------------------------------------------------------------------------------------------------

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(
            instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice,
                                         instrument_device_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='Instrument Data Product',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        inst_data_product_id = self.dmpsclient.create_data_product(
            dp_obj, ctd_stream_def_id)

        #assign data products appropriately
        self.damsclient.assign_data_product(
            input_resource_id=instrument_device_id,
            data_product_id=inst_data_product_id)
        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        #----------------------------------------------------------------------------------------------------

        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(
            instrument_model_obj)

        #----------------------------------------------------------------------------------------------------
        # Create a deployment object
        #----------------------------------------------------------------------------------------------------

        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)

        return ret

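    # Note: base_activate_deployment deliberately leaves model assignment and
    # site/device deployment to the individual tests, so each test below can
    # drive activate_deployment down a different success/failure path.
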
    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_platform_model_to_platform_device(
            res.platform_model_id, res.platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(
            res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_platform_model_to_platform_site(
            res.platform_model_id, res.platform_site_id)
        self.omsclient.assign_instrument_model_to_instrument_site(
            res.instrument_model_id, res.instrument_site_id)

        log.debug("adding instrument site and device to deployment")
        self.omsclient.deploy_instrument_site(res.instrument_site_id,
                                              res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id,
                                                res.deployment_id)

        log.debug("adding platform site and device to deployment")
        self.omsclient.deploy_platform_site(res.platform_site_id,
                                            res.deployment_id)
        self.imsclient.deploy_platform_device(res.platform_device_id,
                                              res.deployment_id)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):

        res = self.base_activate_deployment()

        self.omsclient.deploy_instrument_site(res.instrument_site_id,
                                              res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id,
                                                res.deployment_id)

        log.debug(
            "activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(
            res.deployment_id, "Expected at least 1 model for InstrumentSite")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(
            res.instrument_model_id, res.instrument_site_id)

        log.debug(
            "activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id,
                                "Expected 1 model for InstrumentDevice")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):

        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(
            res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(
            res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.imsclient.deploy_instrument_device(res.instrument_device_id,
                                                res.deployment_id)

        log.debug(
            "activating deployment with no site deployed, expecting fail")
        self.assert_deploy_fail(res.deployment_id,
                                "No sites were found in the deployment")

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(
            res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(
            res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.deploy_instrument_site(res.instrument_site_id,
                                              res.deployment_id)

        log.debug(
            "activating deployment with no device deployed, expecting fail")
        self.assert_deploy_fail(
            res.deployment_id,
            "The set of devices could not be mapped to the set of sites")

    def assert_deploy_fail(self,
                           deployment_id,
                           fail_message="did not specify fail_message"):
        with self.assertRaises(BadRequest) as cm:
            self.omsclient.activate_deployment(deployment_id)
        self.assertIn(fail_message, cm.exception.message)
Example #25
class TestOrgManagementServiceInt(IonIntegrationTestCase):
    def setUp(self):

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.resource_registry = ResourceRegistryServiceClient(
            node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(
            node=self.container.node)

    def test_org_crud(self):

        with self.assertRaises(BadRequest) as br:
            self.org_management_service.create_org(
                IonObject("Org", {"name": "Test Facility"}))
        self.assertTrue(
            "can only contain alphanumeric and underscore characters" in
            br.exception.message)

        with self.assertRaises(BadRequest):
            self.org_management_service.create_org()

        org_obj = IonObject("Org", {"name": "TestFacility"})
        org_id = self.org_management_service.create_org(org_obj)
        self.assertNotEqual(org_id, None)

        org = self.org_management_service.read_org(org_id)
        self.assertNotEqual(org, None)

        #Check that the roles got associated to them
        role_list = self.org_management_service.find_org_roles(org_id)
        self.assertEqual(len(role_list), 2)

        with self.assertRaises(BadRequest):
            self.org_management_service.update_org()
        org.name = 'Updated_TestFacility'
        self.org_management_service.update_org(org)

        org = self.org_management_service.read_org(org_id)
        self.assertNotEqual(org, None)
        self.assertEqual(org.name, 'Updated_TestFacility')

        user_role = self.org_management_service.find_org_role_by_name(
            org_id, ORG_MANAGER_ROLE)
        self.assertNotEqual(user_role, None)

        #find_org = self.org_management_service.remove_user_role(org_id, ORG_MANAGER_ROLE)
        #self.assertEqual(find_org, True)

        with self.assertRaises(BadRequest):
            self.org_management_service.delete_org()
        self.org_management_service.delete_org(org_id)

        with self.assertRaises(NotFound) as cm:
            self.org_management_service.read_org(org_id)
        self.assertIn("does not exist", cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.org_management_service.delete_org(org_id)
        self.assertIn("does not exist", cm.exception.message)

    def test_org_affiliation(self):

        root_org = self.org_management_service.find_org()
        self.assertNotEqual(root_org, None)

        org_obj = IonObject("Org", {"name": "TestFacility"})
        org_id = self.org_management_service.create_org(org_obj)
        self.assertNotEqual(org_id, None)

        ret = self.org_management_service.affiliate_org(root_org._id, org_id)
        self.assertTrue(ret)

        ret = self.org_management_service.unaffiliate_org(root_org._id, org_id)
        self.assertTrue(ret)

    def test_find_org_containers(self):

        root_org = self.org_management_service.find_org()
        self.assertNotEqual(root_org, None)

        containers = self.org_management_service.find_org_containers(
            root_org._id)

        all_containers, _ = self.resource_registry.find_resources(
            restype=RT.CapabilityContainer, id_only=True)

        self.assertEqual(len(containers), len(all_containers))
Example #26
class InstrumentSimulator(ApeComponent):

    def __init__(self, component_id, agent, configuration):
        ApeComponent.__init__(self, component_id, agent, configuration)

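    # class-level defaults; overwritten per instance once _start()/run() execute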
    model_id = None
    keep_running = True
    emit_granules = False
    thread = None

    def _start(self):
        # interact with container
        self.resource_registry = ResourceRegistryServiceClient(node=self.agent.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.agent.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.agent.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.agent.container.node)
        self.data_product_client = DataProductManagementServiceClient(node=self.agent.container.node)

        # add appropriate entries to the pycc registry
        if self.configuration.easy_registration:
            self.register()

        # begin loop to generate/send data
        self.thread = self._DataEmitter(self)
        self.thread.start()

    def _stop(self):
        log.debug("stopping instrument")
        self.stop_sending()
        self.keep_running = False

    def perform_action(self, request):
        log.debug('instrument simulator performing action ' + str(request))
        if isinstance(request, StartRequest):
            self.start_sending()
        elif isinstance(request, StopRequest):
            self.stop_sending()
        elif isinstance(request, RegisterWithContainer):
            self.register()
        else:
            raise ApeException('instrument simulator does not know how to: ' + str(request))
        
    def register(self):
        ''' create entries in resource registry for this instrument '''
        # TODO: handle more than one ID returned from registry
        # TODO: fail if request register but already in registry (and not easy_registration)
        # TODO: should register device agent
        log.debug('registering instrument')
        product_ids,_ = self.resource_registry.find_resources(RT.DataProduct, None, self.configuration.data_product, id_only=True)
        if len(product_ids) > 0:
            log.debug('data product was already in the registry')
            self.data_product_id = product_ids[0]
            # if self.configuration.easy_registration:
            #     self.data_product_id = product_ids[0]
            # else:
            #     #raise ApeException('should not find data product in registry until i add it: ' + self.configuration.data_product)
            #     ## TODO need ID from name:  self.data_product_id = self.data_product_client.read_data_product(data_product=self.configuration.data_product)
            #     pass
        else:
            log.debug('adding data product to the registry')
            # Create InstrumentDevice
            device = IonObject(RT.InstrumentDevice, name=self.configuration.data_product, description='instrument simulator', serial_number=self.configuration.data_product )
            device_id = self.imsclient.create_instrument_device(instrument_device=device)
            self.imsclient.assign_instrument_model_to_instrument_device(instrument_model_id=self._get_model_id(), instrument_device_id=device_id)
            

            #@TODO: Do not use CoverageCraft ever.

            craft = CoverageCraft
            sdom, tdom = craft.create_domains()
            sdom = sdom.dump()
            tdom = tdom.dump()
            parameter_dictionary = craft.create_parameters()
            parameter_dictionary = parameter_dictionary.dump()
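            # build the data product, attach a stream definition, link it to
            # the device, then optionally enable persistence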
            data_product = IonObject(RT.DataProduct,name=self.configuration.data_product,description='ape producer', spatial_domain=sdom, temporal_domain=tdom)
            stream_def_id = self._get_streamdef_id(parameter_dictionary)
            self.data_product_id = self.data_product_client.create_data_product(data_product=data_product, stream_definition_id=stream_def_id)
            self.damsclient.assign_data_product(input_resource_id=device_id, data_product_id=self.data_product_id)
            if self.configuration.persist_product:
                self.data_product_client.activate_data_product_persistence(data_product_id=self.data_product_id, persist_data=True, persist_metadata=True)

        # get/create producer id
#        producer_ids, _ = self.clients.resource_registry.find_objects(self.data_product_id, PRED.hasDataProducer, RT.DataProducer, id_only=True)
#        if len(producer_ids)>0:
#            log.debug('data producer already in the registry')
#            self.data_producer_id = producer_ids[0]
#        else:
        self.data_producer_id = 'UNUSED'

        self._create_publisher()

    def _get_model_id(self):
        if self.model_id is None:
            try:
                self.model_id = self._read_model_id()
            except:
                model = IonObject(RT.InstrumentModel, name=self.configuration.instrument.model_name) #, description=self.configuration.model_name, model_label=self.data_source_name)
                self.model_id = self.imsclient.create_instrument_model(model)
        return self.model_id

    def _read_model_id(self):
        # look up the instrument model by name in the resource registry
        model_ids = self.resource_registry.find_resources(restype=RT.InstrumentModel, id_only=True, name=self.configuration.instrument.model_name)
        return model_ids[0][0]

    def _get_streamdef_id(self, pdict=None):
        #TODO: figure out how to read existing RAW definition instead of just creating
        #try:
            #stream_type = self.pubsubclient.read_stream_definition()
        stream_def_id = self.pubsubclient.create_stream_definition(name='RAW stream',parameter_dictionary=pdict)
        return stream_def_id

    def _create_publisher(self):
        stream_ids, _ = self.resource_registry.find_objects(self.data_product_id, PRED.hasStream, None, True)
        stream_id = stream_ids[0]
        self.publisher = StreamPublisher(process=self.agent, stream_id=stream_id)

    class _DataEmitter(Thread):
        ''' do not make outer class a Thread b/c start() is already meaningful to a pyon process '''
        def __init__(self, instrument):
            Thread.__init__(self)
            self.instrument = instrument
            self.daemon = True
        def run(self):
            self.instrument.run()

    def start_sending(self):
        self.emit_granules = True

    def stop_sending(self):
        self.emit_granules = False

    def _get_parameter_dictionary(self):
        pdict = ParameterDictionary()

        sal_ctxt = ParameterContext('salinity', param_type=QuantityType(value_encoding=np.float64))
        sal_ctxt.uom = 'unknown'
        sal_ctxt.fill_value = 0e0
        pdict.add_context(sal_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float64))
        lat_ctxt.uom = 'unknown'
        lat_ctxt.fill_value = 0x0
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float64))
        lon_ctxt.uom = 'unknown'
        lon_ctxt.fill_value = 0x0
        pdict.add_context(lon_ctxt)

        oxy_ctxt = ParameterContext('oxygen', param_type=QuantityType(value_encoding=np.float64))
        oxy_ctxt.uom = 'unknown'
        oxy_ctxt.fill_value = 0x0
        pdict.add_context(oxy_ctxt)

        internal_ts_ctxt = ParameterContext(name='internal_timestamp', param_type=QuantityType(value_encoding=np.float64))
        internal_ts_ctxt._derived_from_name = 'time'
        internal_ts_ctxt.uom = 'seconds'
        internal_ts_ctxt.fill_value = -1
        pdict.add_context(internal_ts_ctxt, is_temporal=True)

        driver_ts_ctxt = ParameterContext(name='driver_timestamp', param_type=QuantityType(value_encoding=np.float64))
        driver_ts_ctxt._derived_from_name = 'time'
        driver_ts_ctxt.uom = 'seconds'
        driver_ts_ctxt.fill_value = -1
        pdict.add_context(driver_ts_ctxt)

        return pdict

    def run(self):
        try:
            self.do_run()
        except:
            log.exception('instrument simulator thread shutting down')

    def do_run(self):
        ''' main loop: create a granule, publish it, then pause to remain at the target rate.
            the three lines of useful code are marked with ###; everything else is timing/reporting.
        '''
        target_iteration_time = sleep_time = self.configuration.interval
        first_batch = True
        granule_count = iteration_count = granule_sum_size = 0
        granule_elapsed_seconds = publish_elapsed_seconds = sleep_elapsed_seconds = iteration_elapsed_seconds = 0.
        do_timing = self.configuration.report_timing or self.configuration.log_timing
        parameter_dictionary = self._get_parameter_dictionary() #DatasetManagementServiceClient(node=self.agent.container.node).read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        if not parameter_dictionary:
            log.error('failed to find parameter dictionary: ctd_parsed_param_dict (producer thread aborting)')
            return
        if isinstance(parameter_dictionary, dict):
            log.debug('pdict is dict')
        elif isinstance(parameter_dictionary, ParameterDictionary):
            log.debug('pdict is ParameterDictionary')
        else:
            log.warn('invalid pdict: %r', parameter_dictionary)
            parameter_dictionary = parameter_dictionary.__dict__

        while self.keep_running:
            if self.emit_granules:
                adjust_timing = True
                iteration_start = granule_start = time()
                if do_timing:
                    granule_count+=1

                ### step 1: create granule
                granule = self.configuration.instrument.get_granule(time=time(), pd=parameter_dictionary)

                if do_timing:
                    granule_end = publish_start = time()

                try:
                    ### step 2: publish granule
                    log.debug(self.component_id + ' publishing granule')
                    self.publisher.publish(granule)
                except Exception as e:
                    if self.keep_running:
                        raise e
                    else:
                        # while blocking on publish(), async call may have closed connection
                        # so eat exception and return cleanly
                        return

                if do_timing:
                    publish_end = time()
                    granule_elapsed_seconds += (granule_end - granule_start)
                    publish_elapsed_seconds += (publish_end - publish_start)
                    pickled_granule = pickle.dumps(granule)
                    granule_size = len(pickled_granule)
                    granule_sum_size += granule_size
                    sleep_start = time()

                ### step 3: sleep to maintain configured rate
                if sleep_time>0 or self.configuration.sleep_even_zero:
                    sleep(sleep_time)

                if do_timing:
                    sleep_end = time()
                    sleep_elapsed_seconds += (sleep_end-sleep_start)

                    if granule_count % self.configuration.timing_rate == 0:
                        # skip the initial batch of timing -- it can be skewed b/c one instrument starts before the other,
                        # so the first instrument works against a more idle system
                        if first_batch:
                            granule_count = granule_sum_size = 0
                            granule_elapsed_seconds = publish_elapsed_seconds = sleep_elapsed_seconds = 0.
                            first_batch = False
                            run_start_time = time()
                        else:
                            adjust_timing = False
                            if self.configuration.log_timing:
                                log.info('%s: granule: %f, publish: %f, sleep: %f, size %f' %
                                          (self.configuration.data_product, granule_elapsed_seconds/granule_count,
                                           publish_elapsed_seconds/granule_count, sleep_elapsed_seconds/granule_count,
                                           granule_sum_size/granule_count))
                            if self.configuration.report_timing:
                                report = { 'count': granule_count,
                                           'time': time() - run_start_time,
                                           'publish':publish_elapsed_seconds/granule_count,
                                           'granule':granule_elapsed_seconds/granule_count,
                                           'sleep': sleep_elapsed_seconds/granule_count,
                                           'iteration': iteration_elapsed_seconds/iteration_count }
                                if self.configuration.include_size:
                                    report['size'] = granule_sum_size
                                message = PerformanceResult(report)
                                self.report(message)

                if adjust_timing:
                    iteration_count+=1
                    iteration_end = time()
                    actual_iteration_time = iteration_end - iteration_start
                    iteration_elapsed_seconds += actual_iteration_time
                    adjustment = target_iteration_time - actual_iteration_time
                    sleep_time = max(sleep_time + adjustment, 0)

            else:
                # if not emitting granules, don't do timing either,
                # and don't sleep so briefly that we hog the CPU
                sleep(max(10,self.configuration.interval))
                log.debug('not emitting')
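# A minimal standalone sketch (not part of the original example) of the rate
# control used in do_run above: measure each full iteration and fold the error
# back into the next sleep, so the average iteration time converges on the
# configured interval. The produce/publish callables are illustrative
# placeholders, not names from the original code.
from time import time, sleep

def run_at_rate(produce, publish, interval, iterations=100):
    sleep_time = interval
    for _ in range(iterations):
        start = time()
        publish(produce())                      # the useful work
        if sleep_time > 0:
            sleep(sleep_time)
        actual = time() - start                 # work + sleep for this pass
        # if we ran long, sleep less next time; if we ran short, sleep more
        sleep_time = max(sleep_time + (interval - actual), 0)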
Example #27
class NotificationWorker(TransformEventListener):
    """
    Instances of this class act as Notification Workers.
    """
    def on_init(self):
        self.user_info = {}
        self.resource_registry = ResourceRegistryServiceClient()
        self.q = gevent.queue.Queue()

        super(NotificationWorker, self).on_init()

    def test_hook(self, user_info, reverse_user_info):
        '''
        This method exists only to facilitate the testing of the reload of the user_info dictionary
        '''
        self.q.put((user_info, reverse_user_info))

    def on_start(self):
        super(NotificationWorker, self).on_start()

        self.reverse_user_info = None
        self.user_info = None

        #------------------------------------------------------------------------------------
        # Start by loading the user info and reverse user info dictionaries
        #------------------------------------------------------------------------------------

        try:
            self.user_info = self.load_user_info()
            self.reverse_user_info = calculate_reverse_user_info(
                self.user_info)

            log.debug(
                "On start up, notification workers loaded the following user_info dictionary: %s"
                % self.user_info)
            log.debug("The calculated reverse user info: %s" %
                      self.reverse_user_info)

        except NotFound as exc:
            if exc.message.find('users_index') > -1:
                log.warning(
                    "Notification workers found on start up that the users_index has not been loaded yet."
                )
            else:
                raise NotFound(exc.message)

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            '''
            Callback method for the subscriber to ReloadUserInfoEvent
            '''

            notification_id = event_msg.notification_id
            log.debug(
                "Notification worker received a ReloadNotificationEvent. The relevant notification_id is %s"
                % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info = calculate_reverse_user_info(
                self.user_info)
            self.test_hook(self.user_info, self.reverse_user_info)

            log.debug("After a reload, the user_info: %s" % self.user_info)
            log.debug("The recalculated reverse_user_info: %s" %
                      self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type="ReloadUserInfoEvent",
            origin='UserNotificationService',
            callback=reload_user_info)
        self.reload_user_info_subscriber.start()

    def process_event(self, msg, headers):
        """
        Callback method for the subscriber listening for all events
        """
        #------------------------------------------------------------------------------------
        # From the reverse user info dict find out which users have subscribed to that event
        #------------------------------------------------------------------------------------

        user_ids = []
        if self.reverse_user_info:
            log.debug(
                "Notification worker checking for users interested in %s" %
                msg.type_)
            user_ids = check_user_notification_interest(
                event=msg, reverse_user_info=self.reverse_user_info)

        #------------------------------------------------------------------------------------
        # Send email to the users
        #------------------------------------------------------------------------------------

        for user_id in user_ids:
            msg_recipient = self.user_info[user_id]['user_contact'].email
            self.smtp_client = setting_up_smtp_client()
            send_email(message=msg,
                       msg_recipient=msg_recipient,
                       smtp_client=self.smtp_client)
            self.smtp_client.quit()

    def on_stop(self):
        # close subscribers safely
        self.reload_user_info_subscriber.stop()

        super(NotificationWorker, self).on_stop()

    def on_quit(self):
        # close subscribers safely
        self.reload_user_info_subscriber.stop()

        super(NotificationWorker, self).on_quit()

    def load_user_info(self):
        '''
        Method to load the user info dictionary used by the notification workers and the UNS

        @retval user_info dict
        '''

        users, _ = self.resource_registry.find_resources(restype=RT.UserInfo)

        user_info = {}

        if not users:
            return {}

        for user in users:
            notifications = []
            notification_preferences = None
            for variable in user.variables:
                if variable['name'] == 'notifications':
                    notifications = variable['value']

                if variable['name'] == 'notification_preferences':
                    notification_preferences = variable['value']

            user_info[user._id] = {
                'user_contact': user.contact,
                'notifications': notifications,
                'notification_preferences': notification_preferences
            }

        return user_info
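# A hedged sketch (assumed structure, not the original implementation) of what
# calculate_reverse_user_info could do with the dict built above: invert
# user_info (user_id -> notifications) into event_type -> [user_ids], so that
# process_event can find interested users with a single lookup. The
# 'event_type' attribute on a notification is an assumption here.
def build_reverse_user_info(user_info):
    reverse = {}
    for user_id, info in user_info.items():
        for notification in info.get('notifications', []):
            event_type = getattr(notification, 'event_type', None)
            if event_type:
                reverse.setdefault(event_type, []).append(user_id)
    return reverse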
Example #28
class VisualizationIntegrationTestHelper(IonIntegrationTestCase):

    def create_ctd_input_stream_and_data_product(self, data_product_name='ctd_parsed'):

        cc = self.container
        assertions = self.assertTrue

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.workflowclient = WorkflowManagementServiceClient(node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)
        self.vis_client = VisualizationServiceClient(node=self.container.node)


        #-------------------------------
        # Create CTD Parsed as the initial data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(container=ctd_stream_def, name='Simulated CTD data')


        log.debug('Creating new CDM data product with a stream definition')

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(RT.DataProduct,
            name=data_product_name,
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product_id = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)

        log.debug('new ctd_parsed_data_product_id = %s' % ctd_parsed_data_product_id)

        #Only ever need one device for testing purposes.
        instDevice_obj,_ = self.rrclient.find_resources(restype=RT.InstrumentDevice, name='SBE37IMDevice')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product_id)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product_id)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        ctd_stream_id = stream_ids[0]

        return ctd_stream_id, ctd_parsed_data_product_id

    def create_data_product(self, dp_name="", dp_description="", stream_definition_id=None):

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()   # this creates a ParameterDictionary object
        parameter_dictionary = parameter_dictionary.dump()  # this returns a python dictionary

        data_prod_obj = IonObject(RT.DataProduct,
            name=dp_name,
            description=dp_description,
            temporal_domain = tdom,
            spatial_domain = sdom)

        data_prod_id = self.dataproductclient.create_data_product(data_prod_obj, stream_definition_id, parameter_dictionary)

        return data_prod_id, data_prod_obj

    def start_simple_input_stream_process(self, ctd_stream_id):
        return self.start_input_stream_process(ctd_stream_id)

    def start_sinusoidal_input_stream_process(self, ctd_stream_id):
        return self.start_input_stream_process(ctd_stream_id, 'ion.processes.data.sinusoidal_stream_publisher', 'SinusoidalCtdPublisher')

    def start_input_stream_process(self, ctd_stream_id, module = 'ion.processes.data.ctd_stream_publisher', class_name= 'SimpleCtdPublisher'):


        ###
        ### Start the process for producing the CTD data
        ###
        # process definition for the ctd simulator...
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module':module,
            'class':class_name
        }

        ctd_sim_procdef_id = self.process_dispatcher.create_process_definition(process_definition=producer_definition)

        # Start the ctd simulator to produce some data
        configuration = {
            'process':{
                'stream_id':ctd_stream_id,
                }
        }
        ctd_sim_pid = self.process_dispatcher.schedule_process(process_definition_id=ctd_sim_procdef_id, configuration=configuration)

        return ctd_sim_pid

    def start_output_stream_and_listen(self, ctd_stream_id, data_product_stream_ids, message_count_per_stream=10):

        cc = self.container
        assertions = self.assertTrue

        ###
        ### Make a subscriber in the test to listen for transformed data
        ###
        salinity_subscription_id = self.pubsubclient.create_subscription(
            query=StreamQuery(data_product_stream_ids),
            exchange_name = 'workflow_test',
            exchange_point = 'science_data',
            name = "test workflow transformations",
        )

        pid = cc.spawn_process(name='dummy_process_for_test',
            module='pyon.ion.process',
            cls='SimpleProcess',
            config={})
        dummy_process = cc.proc_manager.procs[pid]

        subscriber_registrar = StreamSubscriberRegistrar(process=dummy_process, container=cc)

        result = gevent.event.AsyncResult()
        results = []
        message_count = len(data_product_stream_ids) * message_count_per_stream

        def message_received(message, headers):
            results.append(message)
            if len(results) >= message_count:   #Only wait for so many messages - per stream
                result.set(True)

        subscriber = subscriber_registrar.create_subscriber(exchange_name='workflow_test', callback=message_received)
        subscriber.start()

        # after the queue has been created it is safe to activate the subscription
        self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id)


        #Start the input stream process
        if ctd_stream_id is not None:
            ctd_sim_pid = self.start_simple_input_stream_process(ctd_stream_id)

        # Assert that we have received data
        assertions(result.get(timeout=30))

        # stop the flow and parse the messages...
        if ctd_stream_id is not None:
            self.process_dispatcher.cancel_process(ctd_sim_pid) # kill the ctd simulator process - that is enough data

        self.pubsubclient.deactivate_subscription(subscription_id=salinity_subscription_id)

        subscriber.stop()

        return results


    def validate_messages(self, results):

        cc = self.container
        assertions = self.assertTrue

        first_salinity_values = None

        for message in results:
            rdt = RecordDictionaryTool.load_from_granule(message)

            try:
                temp = get_safe(rdt, 'temp')
            except KeyError:
                temp = None

            if temp is not None:
                assertions(isinstance(temp, numpy.ndarray))

                log.info( 'temperature=' + str(numpy.nanmin(temp)))

                first_salinity_values = None

            else:
                # you have to know the name of the coverage in the stream def
                salinity = get_safe(rdt, 'salinity')
                log.info('salinity=' + str(numpy.nanmin(salinity)))

                # Check to see if salinity has values
                assertions(salinity is not None)

                assertions(isinstance(salinity, numpy.ndarray))
                assertions(numpy.nanmin(salinity) > 0.0) # salinity should always be greater than 0

                if first_salinity_values is None:
                    first_salinity_values = salinity.tolist()
                else:
                    second_salinity_values = salinity.tolist()
                    assertions(len(first_salinity_values) == len(second_salinity_values))
                    for idx in range(0,len(first_salinity_values)):
                        assertions(first_salinity_values[idx]*2.0 == second_salinity_values[idx])


    def validate_data_ingest_retrieve(self, dataset_id):

        assertions = self.assertTrue
        self.data_retriever = DataRetrieverServiceClient(node=self.container.node)

        #validate that data was ingested
        replay_granule = self.data_retriever.retrieve_last_granule(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(replay_granule)
        salinity = get_safe(rdt, 'salinity')
        assertions(salinity is not None)

        #retrieve all the granules from the database and check the values
        replay_granule_all = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(replay_granule_all)
        for k, v in rdt.iteritems():
            if k == 'salinity':
                for val in numpy.nditer(v):
                    assertions(val > 0)

    def create_salinity_data_process_definition(self):

        # Salinity: Data Process Definition

        #First look to see if it exists and if not, then create it
        dpd,_ = self.rrclient.find_resources(restype=RT.DataProcessDefinition, name='ctd_salinity')
        if len(dpd) > 0:
            return dpd[0]

        log.debug("Create data process definition SalinityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='ctd_salinity',
            description='create a salinity data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
            class_name='SalinityTransform',
            process_source='SalinityTransform source code here...')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except Exception as ex:
            self.fail("failed to create new SalinityTransform data process definition: %s" % ex)

        # create a stream definition for the data from the salinity Transform
        sal_stream_def_id = self.pubsubclient.create_stream_definition(container=SalinityTransform.outgoing_stream_def,  name='Salinity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(sal_stream_def_id, ctd_L2_salinity_dprocdef_id )

        return ctd_L2_salinity_dprocdef_id

    def create_salinity_doubler_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd,_ = self.rrclient.find_resources(restype=RT.DataProcessDefinition, name='salinity_doubler')
        if len(dpd) > 0:
            return dpd[0]

        # Salinity Doubler: Data Process Definition
        log.debug("Create data process definition SalinityDoublerTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='salinity_doubler',
            description='create a salinity doubler data product',
            module='ion.processes.data.transforms.example_double_salinity',
            class_name='SalinityDoubler',
            process_source='SalinityDoubler source code here...')
        try:
            salinity_doubler_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except Exception as ex:
            self.fail("failed to create new SalinityDoubler data process definition: %s" %ex)


        # create a stream definition for the data from the salinity Transform
        salinity_double_stream_def_id = self.pubsubclient.create_stream_definition(container=SalinityDoubler.outgoing_stream_def,  name='SalinityDoubler')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(salinity_double_stream_def_id, salinity_doubler_dprocdef_id )

        return salinity_doubler_dprocdef_id


    def create_transform_process(self, data_process_definition_id, data_process_input_dp_id):

        data_process_definition = self.rrclient.read(data_process_definition_id)

        # Find the link between the output Stream Definition resource and the Data Process Definition resource
        stream_ids,_ = self.rrclient.find_objects(data_process_definition._id, PRED.hasStreamDefinition, RT.StreamDefinition,  id_only=True)
        if not stream_ids:
            raise Inconsistent("The data process definition %s is missing an association to an output stream definition" % data_process_definition._id )
        process_output_stream_def_id = stream_ids[0]

        # Use the data process definition name as the name of the output data product
        data_process_name = data_process_definition.name

        # Create the output data product of the transform
        transform_dp_obj = IonObject(RT.DataProduct, name=data_process_name,description=data_process_definition.description)
        transform_dp_id = self.dataproductclient.create_data_product(transform_dp_obj, process_output_stream_def_id)
        self.dataproductclient.activate_data_product_persistence(data_product_id=transform_dp_id)

        # the transform's data product is the output product id
        output_data_product_id = transform_dp_id

        # Create the  transform data process
        log.debug("create data_process and start it")
        data_process_id = self.dataprocessclient.create_data_process(data_process_definition._id, [data_process_input_dp_id], {'output':transform_dp_id})
        self.dataprocessclient.activate_data_process(data_process_id)


        #Find the id of the output data stream
        stream_ids, _ = self.rrclient.find_objects(transform_dp_id, PRED.hasStream, None, True)
        if not stream_ids:
            raise Inconsistent("The data process %s is missing an association to an output stream" % data_process_id )

        return data_process_id, output_data_product_id



    def create_google_dt_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd,_ = self.rrclient.find_resources(restype=RT.DataProcessDefinition, name='google_dt_transform')
        if len(dpd) > 0:
            return dpd[0]

        # Data Process Definition
        log.debug("Create data process definition GoogleDtTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='google_dt_transform',
            description='Convert data streams to Google DataTables',
            module='ion.processes.data.transforms.viz.google_dt',
            class_name='VizTransformGoogleDT',
            process_source='VizTransformGoogleDT source code here...')
        try:
            procdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except Exception as ex:
            self.fail("failed to create new VizTransformGoogleDT data process definition: %s" %ex)


        # create a stream definition for the data from the Google DT transform
        stream_def_id = self.pubsubclient.create_stream_definition(container=VizTransformGoogleDT.outgoing_stream_def,  name='VizTransformGoogleDT')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(stream_def_id, procdef_id )

        return procdef_id


    def validate_google_dt_transform_results(self, results):

        cc = self.container
        assertions = self.assertTrue

        # if it's just one granule, wrap it in a list so we can use the following for loop for both cases
        if isinstance(results, Granule):
            results = [results]

        for g in results:

            if isinstance(g,Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)

                gdt_data = get_safe(rdt, 'google_dt_components')

                # if this granule does not contain google dt data, skip it
                if gdt_data is None:
                    continue

                gdt = gdt_data[0]

                assertions(gdt['viz_product_type'] == 'google_dt' )
                assertions(len(gdt['data_description']) >= 0) # Need to come up with a better check
                assertions(len(gdt['data_content']) >= 0)




    def create_mpl_graphs_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd,_ = self.rrclient.find_resources(restype=RT.DataProcessDefinition, name='mpl_graphs_transform')
        if len(dpd) > 0:
            return dpd[0]

        #Data Process Definition
        log.debug("Create data process definition MatplotlibGraphsTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='mpl_graphs_transform',
            description='Convert data streams to Matplotlib graphs',
            module='ion.processes.data.transforms.viz.matplotlib_graphs',
            class_name='VizTransformMatplotlibGraphs',
            process_source='VizTransformMatplotlibGraphs source code here...')
        try:
            procdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except Exception as ex:
            self.fail("failed to create new VizTransformMatplotlibGraphs data process definition: %s" %ex)


        # create a stream definition for the data
        stream_def_id = self.pubsubclient.create_stream_definition(container=VizTransformMatplotlibGraphs.outgoing_stream_def,  name='VizTransformMatplotlibGraphs')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(stream_def_id, procdef_id )

        return procdef_id

    def validate_mpl_graphs_transform_results(self, results):

        cc = self.container
        assertions = self.assertTrue

        # if it's just one granule, wrap it in a list so we can use the following for loop for both cases
        if isinstance(results, Granule):
            results = [results]

        for g in results:
            if isinstance(g,Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)

                graphs = get_safe(rdt, 'matplotlib_graphs')

                if graphs is None:
                    continue

                for graph in graphs[0]:

                    # At this point only dictionaries containing image data should be passed
                    # For some reason non dictionary values are filtering through.
                    if not isinstance(graph, dict):
                        continue

                    assertions(graph['viz_product_type'] == 'matplotlib_graphs' )
                    # check to see if the list (numpy array) contains actual images
                    assertions(imghdr.what(graph['image_name'], h = graph['image_obj']) == 'png')



    def validate_vis_service_google_dt_results(self, results):


        assertions = self.assertTrue

        assertions(results)
        # str.lstrip/rstrip strip characters from a set, not a literal prefix/suffix,
        # so remove the JSONP wrapper explicitly
        prefix = "google.visualization.Query.setResponse("
        gdt_str = results[len(prefix):-1] if results.startswith(prefix) else results

        assertions(len(gdt_str) > 0)

        return

    def validate_vis_service_mpl_graphs_results(self, results):

        assertions = self.assertTrue
        assertions(results)

        # check to see if the object passed is a dictionary with a valid image object in it
        # split on "/" rather than lstrip: lstrip("image/") strips a character set, not the prefix
        image_format = results["content_type"].split("/", 1)[-1]

        assertions(imghdr.what(results['image_name'], h = base64.decodestring(results['image_obj'])) == image_format)

        return
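# Note on the two validators above: str.lstrip and str.rstrip remove any
# characters drawn from their argument *as a set*, not a literal
# prefix/suffix, which is why the wrappers are stripped explicitly. A small,
# general helper (illustrative, not from the original code):
def strip_prefix(s, prefix):
    return s[len(prefix):] if s.startswith(prefix) else s

# strip_prefix("image/png", "image/") -> "png", whereas
# "image/gif".lstrip("image/") -> "f", because 'g' and 'i' are in the
# character set {i, m, a, g, e, /} and get stripped too.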
class TestExchangeManagementServiceInt(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.ems = ExchangeManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()

        orglist, _ = self.rr.find_resources(RT.Org)
        if len(orglist) < 1:
            raise StandardError("No orgs found")

        self.org_id = orglist[0]._id

        # we test actual exchange interaction in pyon, so it's fine to mock the broker interaction here
        self.container.ex_manager.create_xs = Mock()
        self.container.ex_manager.delete_xs = Mock()
        self.container.ex_manager.create_xp = Mock()
        self.container.ex_manager.delete_xp = Mock()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_xs_create_delete(self):
        exchange_space = ExchangeSpace(name="bobo")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        # should be able to pull from RR an exchange space
        es2 = self.rr.read(esid)
        self.assertEquals(exchange_space.name, es2.name)

        # should have an exchange declared on the broker
        self.container.ex_manager.create_xs.assert_called_once_with(
            'bobo', use_ems=False)

        # should have an assoc to an org
        orglist, _ = self.rr.find_subjects(RT.Org,
                                           PRED.hasExchangeSpace,
                                           esid,
                                           id_only=True)
        self.assertEquals(len(orglist), 1)
        self.assertEquals(orglist[0], self.org_id)

        self.ems.delete_exchange_space(esid)

        # should no longer have that id in the RR
        self.assertRaises(NotFound, self.rr.read, esid)

        # should no longer have an assoc to an org
        orglist2, _ = self.rr.find_subjects(RT.Org,
                                            PRED.hasExchangeSpace,
                                            esid,
                                            id_only=True)
        self.assertEquals(len(orglist2), 0)

        # should no longer have that exchange declared
        self.assertEquals(self.container.ex_manager.delete_xs.call_count, 1)
        self.assertIn('bobo', self.container.ex_manager.delete_xs.call_args[0]
                      [0].exchange)  # prefixed with sysname

    def test_xs_delete_without_create(self):
        self.assertRaises(NotFound, self.ems.delete_exchange_space, '123')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_xp_create_delete(self):

        # xp needs an xs first
        exchange_space = ExchangeSpace(name="doink")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        exchange_point = ExchangePoint(name="hammer")
        epid = self.ems.create_exchange_point(exchange_point, esid)

        # should be in RR
        ep2 = self.rr.read(epid)
        self.assertEquals(exchange_point.name, ep2.name)

        # should be associated to the XS as well
        xslist, _ = self.rr.find_subjects(RT.ExchangeSpace,
                                          PRED.hasExchangePoint,
                                          epid,
                                          id_only=True)
        self.assertEquals(len(xslist), 1)
        self.assertEquals(xslist[0], esid)

        # should exist on broker (both xp and xs)
        self.container.ex_manager.create_xs.assert_called_once_with(
            'doink', use_ems=False)
        self.assertIn('hammer',
                      self.container.ex_manager.create_xp.call_args[0])

        self.ems.delete_exchange_point(epid)
        self.ems.delete_exchange_space(esid)

        # should no longer be in RR
        self.assertRaises(NotFound, self.rr.read, epid)

        # should no longer be associated
        xslist2, _ = self.rr.find_subjects(RT.ExchangeSpace,
                                           PRED.hasExchangePoint,
                                           epid,
                                           id_only=True)
        self.assertEquals(len(xslist2), 0)

        # should no longer exist on broker (both xp and xs)

    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_xp_create_then_delete_xs(self):

        # xp needs an xs first
        exchange_space = ExchangeSpace(name="doink")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        exchange_point = ExchangePoint(name="hammer")
        epid = self.ems.create_exchange_point(exchange_point, esid)

        # delete xs
        self.ems.delete_exchange_space(esid)

        # should no longer have an association
        xslist2, _ = self.rr.find_subjects(RT.ExchangeSpace,
                                           PRED.hasExchangePoint,
                                           epid,
                                           id_only=True)
        self.assertEquals(len(xslist2), 0)

        # TEST ONLY: have to clean up the xp or we leave junk on the broker
        # we have to do it manually because the xs is gone
        #self.ems.delete_exchange_point(epid)
        # @TODO: reaching into ex manager for transport is clunky
        xs = exchange.ExchangeSpace(
            self.container.ex_manager,
            self.container.ex_manager._priviledged_transport,
            exchange_space.name)
        xp = exchange.ExchangePoint(
            self.container.ex_manager,
            self.container.ex_manager._priviledged_transport,
            exchange_point.name, xs, 'ttree')
        self.container.ex_manager.delete_xp(xp, use_ems=False)

    def test_xn_declare_and_undeclare(self):

        # xn needs an xs first
        exchange_space = ExchangeSpace(name="bozo")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        exchange_name = ExchangeName(name='shoes', xn_type="XN_PROCESS")
        enid = self.ems.declare_exchange_name(exchange_name, esid)

        # should be in RR
        en2 = self.rr.read(enid)
        self.assertEquals(exchange_name.name, en2.name)

        # should have an assoc from XN to XS
        xnlist, _ = self.rr.find_subjects(RT.ExchangeSpace,
                                          PRED.hasExchangeName,
                                          enid,
                                          id_only=True)
        self.assertEquals(len(xnlist), 1)
        self.assertEquals(xnlist[0], esid)

        # container API got called, will have declared a queue
        self.ems.undeclare_exchange_name(
            enid)  # canonical name = xn id in current impl

    def test_xn_declare_no_xs(self):
        exchange_name = ExchangeName(name="shoez", xn_type='XN_PROCESS')
        self.assertRaises(NotFound, self.ems.declare_exchange_name,
                          exchange_name, '11')

    def test_xn_undeclare_without_declare(self):
        self.assertRaises(NotFound, self.ems.undeclare_exchange_name,
                          'some_non_id')

    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_xn_declare_then_delete_xs(self):

        # xn needs an xs first
        exchange_space = ExchangeSpace(name="bozo")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        exchange_name = ExchangeName(name='shnoz', xn_type="XN_PROCESS")
        enid = self.ems.declare_exchange_name(exchange_name, esid)

        # delete the XS
        self.ems.delete_exchange_space(esid)

        # no longer should have assoc from XS to XN
        xnlist, _ = self.rr.find_subjects(RT.ExchangeSpace,
                                          PRED.hasExchangeName,
                                          enid,
                                          id_only=True)
        self.assertEquals(len(xnlist), 0)

        # cleanup: delete the XN (assoc already removed, so we reach into the implementation here)
        # @TODO: reaching into ex manager for transport is clunky
        self.rr.delete(enid)
        xs = exchange.ExchangeSpace(
            self.container.ex_manager,
            self.container.ex_manager._priviledged_transport,
            exchange_space.name)
        xn = exchange.ExchangeName(
            self.container.ex_manager,
            self.container.ex_manager._priviledged_transport,
            exchange_name.name, xs)
        self.container.ex_manager.delete_xn(xn, use_ems=False)
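# A small illustration (not from the original suite) of the Mock inspection
# pattern these tests rely on: call_args holds an (args, kwargs) pair for the
# most recent call, so call_args[0][0] is the first positional argument. The
# 'durable' kwarg below is a made-up example.
from mock import Mock

m = Mock()
m.declare_exchange_impl('sysname.bobo', durable=True)
args, kwargs = m.declare_exchange_impl.call_args
assert args[0] == 'sysname.bobo'
assert kwargs == {'durable': True}
assert m.declare_exchange_impl.call_count == 1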
Example #30
class TestDeployment(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)


    #@unittest.skip("targeting")
    def test_create_deployment(self):

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                        name='PlatformSite1',
                                        description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                        name='TestDeployment',
                                        description='some new deployment',
                                        constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj) )

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def test_prepare_deployment_support(self):

        deploy_sup = self.omsclient.prepare_deployment_support()
        self.assertTrue(deploy_sup)
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].type_, "AssocDeploymentInstDevice")
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].type_, "AssocDeploymentPlatDevice")
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].type_, "AssocDeploymentInstSite")
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].type_, "AssocDeploymentPlatSite")
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, [])

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                        name='PlatformSite1',
                                        description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                        name='TestDeployment',
                                        description='some new deployment',
                                        constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1)
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1)
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, [])

        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1)
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources), 1)
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1)
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources), 1)

        #delete the deployment
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")


    #@unittest.skip("targeting")
    def base_activate_deployment(self):

        #-------------------------------------------------------------------------------------
        # Create platform site, platform device, platform model
        #-------------------------------------------------------------------------------------

        platform_site__obj = IonObject(RT.PlatformSite,
                                        name='PlatformSite1',
                                        description='test platform site')
        platform_site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(platform_model__obj)



        #-------------------------------------------------------------------------------------
        # Create instrument site
        #-------------------------------------------------------------------------------------

        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM', parameter_dictionary_id=pdict_id)


        #----------------------------------------------------------------------------------------------------
        # Create an instrument device
        #----------------------------------------------------------------------------------------------------

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                        name='InstrumentDevice1',
                                        description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)



        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        #----------------------------------------------------------------------------------------------------

        instrument_model_obj = IonObject(RT.InstrumentModel,
                                        name='InstrumentModel1',
                                        description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)


        #----------------------------------------------------------------------------------------------------
        # Create a deployment object
        #----------------------------------------------------------------------------------------------------

        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )


        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)


        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_platform_model_to_platform_device(res.platform_model_id, res.platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_platform_model_to_platform_site(res.platform_model_id, res.platform_site_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("adding instrument site and device to deployment")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("adding platform site and device to deployment")
        self.omsclient.deploy_platform_site(res.platform_site_id, res.deployment_id)
        self.imsclient.deploy_platform_device(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        log.debug("deactivatin deployment, expecting success")
        self.omsclient.deactivate_deployment(res.deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):

        res = self.base_activate_deployment()

        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):

        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without instrument site, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest, "Devices in this deployment outnumber sites")

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest, "No devices were found in the deployment")


    def test_activate_deployment_asymmetric_children(self):
        """
        P0
        |  \
        P1  P2
        |
        I1

        Complex deployment resolved using CSP.

        P0, P1, and P2 share the same platform model.  The CSP solver should be able to work out
        the device-to-site assignments based on their relationships to parents.

        """

        log.debug("create models")
        imodel_id = self.RR2.create(any_old(RT.InstrumentModel))
        pmodel_id = self.RR2.create(any_old(RT.PlatformModel))

        log.debug("create devices")
        idevice_id = self.RR2.create(any_old(RT.InstrumentDevice))
        pdevice_id = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(3)]

        log.debug("create sites")
        isite_id = self.RR2.create(any_old(RT.InstrumentSite))
        psite_id = [self.RR2.create(any_old(RT.PlatformSite)) for _ in range(3)]

        log.debug("assign models")
        self.RR2.assign_instrument_model_to_instrument_device_with_has_model(imodel_id, idevice_id)
        self.RR2.assign_instrument_model_to_instrument_site_with_has_model(imodel_id, isite_id)
        for x in range(3):
            self.RR2.assign_platform_model_to_platform_device_with_has_model(pmodel_id, pdevice_id[x])
            self.RR2.assign_platform_model_to_platform_site_with_has_model(pmodel_id, psite_id[x])

        log.debug("assign hierarchy")
        self.RR2.assign_instrument_device_to_platform_device_with_has_device(idevice_id, pdevice_id[1])
        self.RR2.assign_instrument_site_to_platform_site_with_has_site(isite_id, psite_id[1])
        for x in range(1,3):
            self.RR2.assign_platform_device_to_platform_device_with_has_device(pdevice_id[x], pdevice_id[0])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(psite_id[x], psite_id[0])

        log.debug("create and activate deployment")
        dep_id = self.RR2.create(any_old(RT.Deployment, {"context": IonObject(OT.RemotePlatformDeploymentContext)}))
        self.RR2.assign_deployment_to_platform_device_with_has_deployment(dep_id, pdevice_id[0])
        self.RR2.assign_deployment_to_platform_site_with_has_deployment(dep_id, psite_id[0])
        self.omsclient.activate_deployment(dep_id)

        log.debug("verifying deployment")
        self.assertEqual(idevice_id, self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(isite_id),
                         "The instrument device was not assigned to the instrument site")
        for x in range(3):
            self.assertEqual(pdevice_id[x], self.RR2.find_platform_device_id_of_platform_site_using_has_device(psite_id[x]),
                             "Platform device %d was not assigned to platform site %d" % (x, x))

    def assert_deploy_fail(self, deployment_id, err_type=BadRequest, fail_message="did not specify fail_message"):
        with self.assertRaises(err_type) as cm:
            self.omsclient.activate_deployment(deployment_id)
        self.assertIn(fail_message, cm.exception.message)

    def test_3x3_matchups_remoteplatform(self):
        self.base_3x3_matchups(IonObject(OT.RemotePlatformDeploymentContext))

    def test_3x3_matchups_cabledinstrument(self):
        self.base_3x3_matchups(IonObject(OT.CabledInstrumentDeploymentContext))

    def test_3x3_matchups_cablednode(self):
        self.base_3x3_matchups(IonObject(OT.CabledNodeDeploymentContext))

    def base_3x3_matchups(self, deployment_context):
        """
        One root platform with 3 sub-platforms (2 of one model, 1 of another), each carrying 3 instruments (2 of one model, 1 of another).
        """
        deployment_context_type = type(deployment_context).__name__

        instrument_model_id  = [self.RR2.create(any_old(RT.InstrumentModel)) for _ in range(6)]
        platform_model_id    = [self.RR2.create(any_old(RT.PlatformModel)) for _ in range(3)]

        instrument_device_id = [self.RR2.create(any_old(RT.InstrumentDevice)) for _ in range(9)]
        platform_device_id   = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(4)]

        instrument_site_id   = [self.RR2.create(any_old(RT.InstrumentSite,
                                                {"planned_uplink_port":
                                                     IonObject(OT.PlatformPort,
                                                               reference_designator="instport_%d" % (i+1))}))
                                for i in range(9)]

        platform_site_id     = [self.RR2.create(any_old(RT.PlatformSite,
                                                {"planned_uplink_port":
                                                    IonObject(OT.PlatformPort,
                                                              reference_designator="platport_%d" % (i+1))}))
                                for i in range(4)]



        def instrument_model_at(platform_idx, instrument_idx):
            m = platform_idx * 2
            if instrument_idx > 0:
                m += 1
            return m

        def platform_model_at(platform_idx):
            if platform_idx > 0:
                return 1
            return 0

        def instrument_at(platform_idx, instrument_idx):
            return platform_idx * 3 + instrument_idx

        # set up the structure
        for p in range(3):
            m = platform_model_at(p)
            self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[m], platform_site_id[p])
            self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[m], platform_device_id[p])
            self.RR2.assign_platform_device_to_platform_device_with_has_device(platform_device_id[p], platform_device_id[3])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(platform_site_id[p], platform_site_id[3])

            for i in range(3):
                m = instrument_model_at(p, i)
                idx = instrument_at(p, i)
                self.RR2.assign_instrument_model_to_instrument_site_with_has_model(instrument_model_id[m], instrument_site_id[idx])
                self.RR2.assign_instrument_model_to_instrument_device_with_has_model(instrument_model_id[m], instrument_device_id[idx])
                self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id[idx], platform_device_id[p])
                self.RR2.assign_instrument_site_to_platform_site_with_has_site(instrument_site_id[idx], platform_site_id[p])

        # top level models
        self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[2], platform_device_id[3])
        self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[2], platform_site_id[3])



        # verify structure
        for p in range(3):
            parent_id = self.RR2.find_platform_device_id_by_platform_device_using_has_device(platform_device_id[p])
            self.assertEqual(platform_device_id[3], parent_id)

            parent_id = self.RR2.find_platform_site_id_by_platform_site_using_has_site(platform_site_id[p])
            self.assertEqual(platform_site_id[3], parent_id)

        for i in range(len(platform_site_id)):
            self.assertEqual(self.RR2.find_platform_model_of_platform_device_using_has_model(platform_device_id[i]),
                             self.RR2.find_platform_model_of_platform_site_using_has_model(platform_site_id[i]))

        for i in range(len(instrument_site_id)):
            self.assertEqual(self.RR2.find_instrument_model_of_instrument_device_using_has_model(instrument_device_id[i]),
                             self.RR2.find_instrument_model_of_instrument_site_using_has_model(instrument_site_id[i]))


        port_assignments = {}
        for p in range(3):
            port_assignments[platform_device_id[p]] = "platport_%d" % (p+1)
            for i in range(3):
                idx = instrument_at(p, i)
                port_assignments[instrument_device_id[idx]] = "instport_%d" % (idx+1)

        deployment_id = self.RR2.create(any_old(RT.Deployment,
                {"context": deployment_context,
                 "port_assignments": port_assignments}))


        log.debug("assigning device/site to %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[3])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[3])

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_instrument_device_with_has_deployment(deployment_id, instrument_device_id[1])
            self.RR2.assign_deployment_to_instrument_site_with_has_deployment(deployment_id, instrument_site_id[1])

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[1])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[1])

        log.debug("activation of %s deployment", deployment_context_type)
        self.omsclient.activate_deployment(deployment_id)

        log.debug("validation of %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(d, self.RR2.find_platform_device_id_of_platform_site_using_has_device(platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(d, self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[i]))

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.assertEqual(instrument_device_id[1],
                             self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[1]))

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            expected_platforms = [1]
            expected_instruments = [3, 4, 5]

            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(i in expected_platforms,
                                 d in self.RR2.find_platform_device_ids_of_platform_site_using_has_device(platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(i in expected_instruments,
                                 d in self.RR2.find_instrument_device_ids_of_instrument_site_using_has_device(instrument_site_id[i]))
# Example #31
class TestObservatoryManagementFullIntegration(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.RR = ResourceRegistryServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient()
        self.org_management_service = OrgManagementServiceClient()
        self.IMS = InstrumentManagementServiceClient()
        self.dpclient = DataProductManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

        self._load_stage = 0
        self._resources = {}

    def preload_ooi(self, stage=STAGE_LOAD_ASSETS):
        # Preloads OOI up to a given stage
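        # Stages load cumulatively and in order; each block below runs at
        # most once, and requesting a stage at or below the one already
        # loaded is a no-op.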

        if self._load_stage >= stage:
            return

        if self._load_stage < STAGE_LOAD_ORGS:
            log.info(
                "--------------------------------------------------------------------------------------------------------"
            )
            log.info("Preloading stage: %s (OOIR2 Orgs, users, roles)",
                     STAGE_LOAD_ORGS)
            # load_OOIR2_scenario
            self.container.spawn_process("Loader",
                                         "ion.processes.bootstrap.ion_loader",
                                         "IONLoader",
                                         config=dict(
                                             op="load",
                                             scenario="OOIR2",
                                             path="master",
                                         ))
            self._load_stage = STAGE_LOAD_ORGS

        if self._load_stage < STAGE_LOAD_PARAMS:
            log.info(
                "--------------------------------------------------------------------------------------------------------"
            )
            log.info("Preloading stage: %s (BASE params, streamdefs)",
                     STAGE_LOAD_PARAMS)
            # load_parameter_scenarios
            self.container.spawn_process(
                "Loader",
                "ion.processes.bootstrap.ion_loader",
                "IONLoader",
                config=dict(
                    op="load",
                    scenario="BETA",
                    path="master",
                    categories=
                    "ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
                    clearcols="owner_id,org_ids",
                    assets="res/preload/r2_ioc/ooi_assets",
                    parseooi="True",
                ))
            self._load_stage = STAGE_LOAD_PARAMS

        if self._load_stage < STAGE_LOAD_AGENTS:
            log.info(
                "--------------------------------------------------------------------------------------------------------"
            )
            log.info("Preloading stage: %s (OOIR2_I agents, model links)",
                     STAGE_LOAD_AGENTS)
            # load_OOIR2_agents
            self.container.spawn_process("Loader",
                                         "ion.processes.bootstrap.ion_loader",
                                         "IONLoader",
                                         config=dict(
                                             op="load",
                                             scenario="OOIR2_I",
                                             path="master",
                                         ))
            self._load_stage = STAGE_LOAD_AGENTS

        if self._load_stage < STAGE_LOAD_ASSETS:
            log.info(
                "--------------------------------------------------------------------------------------------------------"
            )
            log.info(
                "Preloading stage: %s (OOI assets linked to params, agents)",
                STAGE_LOAD_ASSETS)
            # load_ooi_assets
            self.container.spawn_process(
                "Loader",
                "ion.processes.bootstrap.ion_loader",
                "IONLoader",
                config=dict(
                    op="load",
                    loadooi="True",
                    path="master",
                    assets="res/preload/r2_ioc/ooi_assets",
                    bulk="True",
                    debug="True",
                    ooiuntil="9/1/2013",
                    ooiparams="True",
                    #excludecategories: DataProduct,DataProductLink,Deployment,Workflow,WorkflowDefinition
                ))
            self._load_stage = STAGE_LOAD_ASSETS

        # 'DataProduct,DataProductLink,WorkflowDefinition,ExternalDataProvider,ExternalDatasetModel,ExternalDataset,ExternalDatasetAgent,ExternalDatasetAgentInstance',

    @unittest.skip('Work in progress')
    def test_observatory(self):
        self._load_stage = 0
        self._resources = {}
        passing = True

        self.assertTrue(True)

        # LOAD STEP 1
        self.preload_ooi(stage=STAGE_LOAD_ORGS)

        passing &= self.orguserrole_assertions()

        # LOAD STEP 2
        self.preload_ooi(stage=STAGE_LOAD_PARAMS)

        passing &= self.parameter_assertions()

        # LOAD STEP 3
        self.preload_ooi(stage=STAGE_LOAD_AGENTS)

        passing &= self.agent_assertions()

        # LOAD STEP 4
        self.preload_ooi(stage=STAGE_LOAD_ASSETS)

        # Check OOI preloaded resources to see if they match needs for this test and for correctness
        passing &= self.sites_assertions()
        passing &= self.device_assertions()
        passing &= self.deployment_assertions()

        # Extensive tests on select RSN nodes
        passing &= self.rsn_node_checks()

        # Extensive tests on select RSN instruments
        passing &= self.check_rsn_instrument()

        passing &= self.check_rsn_instrument_data_product()

        # Extensive tests on a glider
        #passing &= self.check_glider()

        # Extensive tests on a CG assembly
        #passing &= self.check_cg_assembly()

        # Add a new instrument agent
        # Add a new instrument agent instance
        # Check DataProducts
        # Check Provenance

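        # Use the unwrapped base-class assertTrue so that a False `passing`
        # actually fails the test; self.assertTrue is wrapped below to return
        # a boolean instead of raising.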
        IonIntegrationTestCase.assertTrue(self, passing)

    # -------------------------------------------------------------------------

    def orguserrole_assertions(self):
        passing = True

        passing &= self._check_marine_facility("MF_CGSN")
        passing &= self._check_marine_facility("MF_RSN")
        passing &= self._check_marine_facility("MF_EA")

        return passing

    def _check_marine_facility(self, preload_id):
        passing = True
        log.debug("Checking marine facility %s and associations", preload_id)

        mf_obj = self.retrieve_ooi_asset(preload_id)
        mf_id = mf_obj._id
        self._resources[preload_id] = mf_id

        passing &= self.assertEquals(mf_obj.lcstate, LCS.DEPLOYED)

        res_list, _ = self.RR.find_objects(subject=mf_id,
                                           predicate=PRED.hasMembership,
                                           id_only=True)
        passing &= self.assertTrue(len(res_list) >= 3)

        res_list, _ = self.RR.find_objects(subject=mf_id,
                                           predicate=PRED.hasRole,
                                           id_only=False)
        passing &= self.assertTrue(len(res_list) >= 5)

        passing &= self._check_role_assignments(res_list, "ORG_MANAGER")
        passing &= self._check_role_assignments(res_list,
                                                "OBSERVATORY_OPERATOR")
        passing &= self._check_role_assignments(res_list,
                                                "INSTRUMENT_OPERATOR")

        return passing

    def _check_role_assignments(self, role_list, role_name):
        passing = True
        role_obj = self._find_resource_in_list(role_list, "governance_name",
                                               role_name)
        if role_obj:
            res_list, _ = self.RR.find_subjects(predicate=PRED.hasRole,
                                                object=role_obj._id,
                                                id_only=True)
            passing &= self.assertTrue(len(res_list) >= 1)

        return passing

    def parameter_assertions(self):
        passing = True

        pctx_list, _ = self.RR.find_resources_ext(restype=RT.ParameterContext)
        passing &= self.assertTrue(len(pctx_list) >= 10)

        pdict_list, _ = self.RR.find_resources_ext(
            restype=RT.ParameterDictionary)
        passing &= self.assertTrue(len(pdict_list) >= 10)

        sdef_list, _ = self.RR.find_resources_ext(restype=RT.StreamDefinition)
        passing &= self.assertTrue(len(sdef_list) >= 10)

        # Verify that a PDict has the appropriate QC parameters defined
        pdicts, _ = self.RR.find_resources_ext(restype=RT.ParameterDictionary,
                                               alt_id_ns='PRE',
                                               alt_id='DICT110')
        passing &= self.assertTrue(len(pdicts) == 1)
        if not pdicts:
            return passing
        pdict = pdicts[0]

        # According to the latest SAF, density should NOT have trend

        parameters, _ = self.RR.find_objects(pdict, PRED.hasParameterContext)
        names = [i.name for i in parameters if i.name.startswith('density')]
        passing &= self.assertTrue('density_trndtst_qc' not in names)

        return passing

    def agent_assertions(self):
        passing = True

        # TODO: More tests?

        return passing

    def sites_assertions(self):
        passing = True
        observatory_list, _ = self.RR.find_resources_ext(
            restype=RT.Observatory)
        passing &= self.assertTrue(len(observatory_list) >= 40)
        for obs in observatory_list:
            passing &= self.assertEquals(obs.lcstate, LCS.DEPLOYED)

        platform_site_list, _ = self.RR.find_resources(RT.PlatformSite,
                                                       id_only=False)
        log.debug('platform sites: %s', [ps.name for ps in platform_site_list])
        passing &= self.assertTrue(len(platform_site_list) >= 30)

        return passing

    def device_assertions(self):
        passing = True
        platform_device_list, _ = self.RR.find_resources(RT.PlatformDevice,
                                                         id_only=False)
        passing &= self.assertTrue(len(platform_device_list) >= 30)
        for pdev in platform_device_list:
            log.debug('platform device: %s', pdev.name)
            passing &= self.assertEquals(pdev.lcstate, LCS.PLANNED)

        platform_agent_list, _ = self.RR.find_resources(RT.PlatformAgent,
                                                        id_only=False)
        passing &= self.assertTrue(len(platform_agent_list) >= 2)
        for pagent in platform_agent_list:
            log.debug('platform agent: %s', pagent.name)
            passing &= self.assertEquals(pagent.lcstate, LCS.DEPLOYED)

        instrument_agent_list, _ = self.RR.find_resources(RT.InstrumentAgent,
                                                          id_only=False)
        passing &= self.assertTrue(len(instrument_agent_list) >= 3)
        for iagent in instrument_agent_list:
            log.debug('instrument agent: %s', iagent.name)
            passing &= self.assertEquals(iagent.lcstate, LCS.DEPLOYED)

            model_list, _ = self.RR.find_objects(subject=iagent._id,
                                                 predicate=PRED.hasModel,
                                                 id_only=True)
            passing &= self.assertTrue(
                len(model_list) >= 1, "IA %s" % iagent.name)

        return passing

    def deployment_assertions(self):
        passing = True
        deployment_list, _ = self.RR.find_resources(RT.Deployment,
                                                    id_only=False)
        passing &= self.assertTrue(len(deployment_list) >= 30)
        for deploy in deployment_list:
            log.debug('deployment: %s', deploy.name)
            passing &= self.assertEquals(deploy.lcstate, LCS.DEPLOYED)
        return passing

    def rsn_node_checks(self):
        """
        Current preload creates:
        - PlatformDevice in PLANNED
        - PlatformSite in DEPLOYED
        - Deployment in DEPLOYED
        - Deployment is NOT activated
        """
        passing = True

        dp_obj = self.retrieve_ooi_asset("CE04OSHY-PN01C_DEP")

        passing &= self.assertEquals(dp_obj.lcstate, LCS.DEPLOYED)
        passing &= self.assertEquals(dp_obj.availability, AS.AVAILABLE)
        log.debug(
            'test_observatory  retrieve CE04OSHY-PN01C_DEP deployment:  %s',
            dp_obj)

        # Check existing RSN node CE04OSHY-LV01C Deployment (DEPLOYED lcstate)
        CE04OSHY_LV01C_deployment = self.retrieve_ooi_asset(
            'CE04OSHY-LV01C_DEP')
        passing &= self.assertEquals(CE04OSHY_LV01C_deployment.lcstate,
                                     LCS.DEPLOYED)
        passing &= self.assertEquals(CE04OSHY_LV01C_deployment.availability,
                                     AS.AVAILABLE)

        #self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        log.debug(
            'test_observatory  retrieve RSN node CE04OSHY-LV01C Deployment:  %s',
            CE04OSHY_LV01C_deployment)

        CE04OSHY_LV01C_device = self.retrieve_ooi_asset('CE04OSHY-LV01C_PD')

        # Set CE04OSHY-LV01C device to DEVELOPED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSHY_LV01C_device._id,
            new_lcs_state=LCE.DEVELOP,
            verify=LCS.DEVELOPED)

        # Set CE04OSHY-LV01C device to INTEGRATED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSHY_LV01C_device._id,
            new_lcs_state=LCE.INTEGRATE,
            verify=LCS.INTEGRATED)

        # Set CE04OSHY-LV01C device to DEPLOYED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSHY_LV01C_device._id,
            new_lcs_state=LCE.DEPLOY,
            verify=LCS.DEPLOYED)

        # Set CE04OSHY-LV01C Deployment to DEPLOYED state
        # NOTE: Deployments are created in DEPLOYED state, currently
        #self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_deployment._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for CE04OSHY-LV01C
        self.OMS.activate_deployment(CE04OSHY_LV01C_deployment._id)
        log.debug(
            '---------    activate_deployment CE04OSHY_LV01C_deployment -------------- '
        )
        self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        passing &= self.validate_deployment_activated(
            CE04OSHY_LV01C_deployment._id)

        # (optional) Start CE04OSHY-LV01C platform agent with simulator

        # NOTE: DataProduct is generated in DEPLOYED state
        # # Set DataProduct for CE04OSHY-LV01C platform to DEPLOYED state
        # output_data_product_ids, assns =self.RR.find_objects(subject=CE04OSHY_LV01C_device._id, predicate=PRED.hasOutputProduct, id_only=True)
        # if output_data_product_ids:
        #     #self.assertEquals(len(child_devs), 3)
        #     for output_data_product_id in output_data_product_ids:
        #         log.debug('DataProduct for CE04OSHY-LV01C platform:  %s', output_data_product_id)
        #         self.transition_lcs_then_verify(resource_id=output_data_product_id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Check events for CE04OSHY-LV01C platform

        # Check existing CE04OSBP-LJ01C Deployment (PLANNED lcstate)
        #        dp_list, _  = self.RR.find_resources_ext(alt_id_ns="PRE", alt_id="CE04OSBP-LJ01C_DEP")
        #        self.assertEquals(len(dp_list), 1)
        #        CE04OSHY_LV01C_deployment = dp_list[0]
        #        self.assertEquals(CE04OSHY_LV01C_deployment.lcstate, 'PLANNED')
        #        log.debug('test_observatory  retrieve RSN node CE04OSBP-LJ01C Deployment:  %s', CE04OSHY_LV01C_deployment)

        # Set CE04OSBP-LJ01C Deployment to DEPLOYED state

        # Update description and other attributes for CE04OSBP-LJ01C device resource

        # Create attachment (JPG image) for CE04OSBP-LJ01C device resource

        # Activate Deployment for CE04OSBP-LJ01C

        # (optional) Add/register CE04OSBP-LJ01C platform agent to parent agent

        # (optional) Start CE04OSBP-LJ01C platform agent

        return passing

    def check_rsn_instrument(self):
        """
        Check existing RSN instrument CE04OSBP-LJ01C-06-CTDBPO108 Deployment (DEPLOYED lcstate)
        Current preload creates:
        - InstrumentDevice in PLANNED
        - InstrumentSite in DEPLOYED
        - Deployment in DEPLOYED
        - Deployment is activated
        """

        passing = True
        CE04OSBP_LJ01C_06_CTDBPO108_deploy = self.retrieve_ooi_asset(
            'CE04OSBP-LJ01C-06-CTDBPO108_DEP')
        self.dump_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        #passing &= self.assertEquals(CE04OSBP_LJ01C_06_CTDBPO108_deploy.lcstate, 'PLANNED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEVELOPED state
        CE04OSBP_LJ01C_06_CTDBPO108_device = self.retrieve_ooi_asset(
            'CE04OSBP-LJ01C-06-CTDBPO108_ID')
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id,
            new_lcs_state=LCE.DEVELOP,
            verify='DEVELOPED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id,
            new_lcs_state=LCE.INTEGRATE,
            verify='INTEGRATED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id,
            new_lcs_state=LCE.DEPLOY,
            verify='DEPLOYED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 Deployment to DEPLOYED state
        #self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for CE04OSBP-LJ01C-06-CTDBPO108 instrument
        log.debug(
            '---------    activate_deployment CE04OSBP-LJ01C-06-CTDBPO108 deployment -------------- '
        )
        self.OMS.activate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        passing &= self.validate_deployment_activated(
            CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)

        # (optional) Add/register CE04OSBP-LJ01C-06-CTDBPO108 instrument agent to parent agent

        # (optional) Start CE04OSBP-LJ01C-06-CTDBPO108 instrument agent with simulator

        # Set all DataProducts for CE04OSBP-LJ01C-06-CTDBPO108 to DEPLOYED state

        # (optional) Create a substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device
        CE04OSBP_LJ01C_06_CTDBPO108_isite = self.retrieve_ooi_asset(
            'CE04OSBP-LJ01C-06-CTDBPO108')

        ## create device here: retrieve CTD Mooring on Mooring Riser 001 - similar?
        GP03FLMB_RI001_10_CTDMOG999_ID_idevice = self.retrieve_ooi_asset(
            'GP03FLMB-RI001-10-CTDMOG999_ID')

        deploy_id_2 = self.create_basic_deployment(
            name='CE04OSBP-LJ01C-06-CTDBPO108_DEP2',
            description=
            'substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device'
        )
        self.OMS.assign_device_to_deployment(
            instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id,
            deployment_id=deploy_id_2)
        self.OMS.assign_site_to_deployment(
            instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id,
            deployment_id=deploy_id_2)
        self.dump_deployment(deploy_id_2)

        # (optional) Activate this second deployment - check first deployment is deactivated
        self.OMS.deactivate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        passing &= self.validate_deployment_deactivated(
            CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)

        # log.debug('Activate deployment deploy_id_2')
        # self.get_deployment_ids(deploy_id_2)
        # self.dump_deployment(deploy_id_2, "deploy_id_2")
        # self.OMS.activate_deployment(deploy_id_2)
        # passing &= self.validate_deployment_deactivated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        #
        # # (optional) Set first CE04OSBP-LJ01C-06-CTDBPO108 Deployment to INTEGRATED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')
        #
        # # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')
        #
        #
        # # (optional) Create a third Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a same device from first deployment
        # deploy_id_3 = self.create_basic_deployment(name='CE04OSBP-LJ01C-06-CTDBPO108_DEP3', description='substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with same device as first')
        # self.IMS.deploy_instrument_device(instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id, deployment_id=deploy_id_3)
        # self.OMS.deploy_instrument_site(instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id, deployment_id=deploy_id_3)
        # self.dump_deployment(deploy_id_3)
        #
        #
        # # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')
        #
        # # (optional) Activate this third deployment - check second deployment is deactivated
        # log.debug('Activate deployment deploy_id_3')
        # self.dump_deployment(deploy_id_3)
        # self.OMS.activate_deployment(deploy_id_3)
        # #todo: check second deployment is deactivated

        return passing

    def check_data_product_reference(self, reference_designator, output=None):
        # Use None instead of a mutable default argument; callers pass a list
        # to collect (data_product_id, stream_def_id, dataset_id) tuples.
        if output is None:
            output = []
        passing = True

        data_product_ids, _ = self.RR.find_resources_ext(
            alt_id_ns='PRE',
            alt_id='%s_DPI1' % reference_designator,
            id_only=True)  # Assuming DPI1 is parsed
        passing &= self.assertEquals(len(data_product_ids), 1)

        if not data_product_ids:
            return passing

        # Let's go ahead and activate it
        data_product_id = data_product_ids[0]
        self.dpclient.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.dpclient.suspend_data_product_persistence,
                        data_product_id)

        dataset_ids, _ = self.RR.find_objects(data_product_id,
                                              PRED.hasDataset,
                                              id_only=True)
        passing &= self.assertEquals(len(dataset_ids), 1)
        if not dataset_ids:
            return passing
        dataset_id = dataset_ids[0]

        stream_def_ids, _ = self.RR.find_objects(data_product_id,
                                                 PRED.hasStreamDefinition,
                                                 id_only=True)
        passing &= self.assertEquals(len(dataset_ids), 1)
        if not stream_def_ids:
            return passing
        stream_def_id = stream_def_ids[0]
        output.append((data_product_id, stream_def_id, dataset_id))
        return passing

    def check_tempsf_instrument_data_product(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing: return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
        ntp_now = now + 2208988800
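        # 2208988800 s is the offset between the NTP epoch (1900-01-01) and
        # the Unix epoch (1970-01-01); stream timestamps are NTP seconds.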

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['temperature'] = [[
            25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434, 26.9873,
            15.2883, 16.3374, 14.5883, 15.7253, 18.4383, 15.3488, 17.2993,
            10.2111, 11.5993, 10.9345, 9.4444, 9.9876, 10.9834, 11.0098,
            5.3456, 4.2994, 4.3009
        ]]

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['temperature'], [[
            25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434, 26.9873,
            15.2883, 16.3374, 14.5883, 15.7253, 18.4383, 15.3488, 17.2993,
            10.2111, 11.5993, 10.9345, 9.4444, 9.9876, 10.9834, 11.0098,
            5.3456, 4.2994, 4.3009
        ]])
        return passing

    def check_trhph_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing:
            return passing

        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(
            stream_def_id)
        passing &= self.assertEquals(pdict.name, 'trhph_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)

        # calibration constants
        a = 1.98e-9
        b = -2.45e-6
        c = 9.28e-4
        d = -0.0888
        e = 0.731

        V_s = 1.506
        V_c = 0.
        T = 11.8

        r1 = 0.906
        r2 = 4.095
        r3 = 4.095

        ORP_V = 1.806
        Cl = np.nan

        offset = 2008
        gain = 4.0
        # Normally this would be 50 per the DPS but the precision is %4.0f which truncates the values to the nearest 1...
        ORP = ((ORP_V * 1000.) - offset) / gain
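        # With these inputs: ((1.806 * 1000.) - 2008) / 4.0 = -50.5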

        ntp_now = time.time() + 2208988800

        rdt['cc_a'] = [a]
        rdt['cc_b'] = [b]
        rdt['cc_c'] = [c]
        rdt['cc_d'] = [d]
        rdt['cc_e'] = [e]
        rdt['ref_temp_volts'] = [V_s]
        rdt['resistivity_temp_volts'] = [V_c]
        rdt['eh_sensor'] = [ORP_V]
        rdt['resistivity_5'] = [r1]
        rdt['resistivity_x1'] = [r2]
        rdt['resistivity_x5'] = [r3]
        rdt['cc_offset'] = [offset]
        rdt['cc_gain'] = [gain]
        rdt['time'] = [ntp_now]

        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'],
                                                  [T], 2)
        passing &= self.assert_array_almost_equal(
            rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP],
                                                  4)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)

        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'],
                                                  [T], 2)
        passing &= self.assert_array_almost_equal(
            rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP],
                                                  4)

        return passing

    def check_vel3d_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(
            stream_def_id)
        passing &= self.assertEquals(pdict.name, 'vel3d_b_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        lat = 14.6846
        lon = -51.044
        ts = np.array([
            3319563600, 3319567200, 3319570800, 3319574400, 3319578000,
            3319581600, 3319585200, 3319588800, 3319592400, 3319596000
        ],
                      dtype=np.float)

        ve = np.array([-3.2, 0.1, 0., 2.3, -0.1, 5.6, 5.1, 5.8, 8.8, 10.3])

        vn = np.array([18.2, 9.9, 12., 6.6, 7.4, 3.4, -2.6, 0.2, -1.5, 4.1])
        vu = np.array([-1.1, -0.6, -1.4, -2, -1.7, -2, 1.3, -1.6, -1.1, -4.5])
        ve_expected = np.array([
            -0.085136, -0.028752, -0.036007, 0.002136, -0.023158, 0.043218,
            0.056451, 0.054727, 0.088446, 0.085952
        ])
        vn_expected = np.array([
            0.164012, 0.094738, 0.114471, 0.06986, 0.07029, 0.049237,
            -0.009499, 0.019311, 0.012096, 0.070017
        ])
        vu_expected = np.array([
            -0.011, -0.006, -0.014, -0.02, -0.017, -0.02, 0.013, -0.016,
            -0.011, -0.045
        ])

        rdt['time'] = ts
        rdt['lat'] = [lat] * 10
        rdt['lon'] = [lon] * 10
        rdt['turbulent_velocity_east'] = ve
        rdt['turbulent_velocity_north'] = vn
        rdt['turbulent_velocity_up'] = vu

        passing &= self.assert_array_almost_equal(
            rdt['eastward_turbulent_velocity'], ve_expected)
        passing &= self.assert_array_almost_equal(
            rdt['northward_turbulent_velocity'], vn_expected)
        passing &= self.assert_array_almost_equal(
            rdt['upward_turbulent_velocity'], vu_expected)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(
            rdt['eastward_turbulent_velocity'], ve_expected)
        passing &= self.assert_array_almost_equal(
            rdt['northward_turbulent_velocity'], vn_expected)
        passing &= self.assert_array_almost_equal(
            rdt['upward_turbulent_velocity'], vu_expected)
        return passing

    def check_presta_instrument_data_products(self, reference_designator):
        # Check the parsed data product to make sure it's got everything it needs and can be published, persisted, etc.

        # Absolute pressure (SFLPRES_L0) is what comes off the instrument; SFLPRES_L1 is a parameter function.
        # Let's go ahead and publish some fake data!
        # According to https://alfresco.oceanobservatories.org/alfresco/d/d/workspace/SpacesStore/63e16865-9d9e-4b11-b0b3-d5658faa5080/1341-00230_Data_Product_Spec_SFLPRES_OOI.pdf
        # Appendix A, Example 1:
        # p_psia_tide = 14.8670
        # the tide should be 10.2504
        passing = True

        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
        ntp_now = now + 2208988800.

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['absolute_pressure'] = [14.8670]
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'],
                                                  [10.2504], 4)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(
            dataset_monitor.wait())  # Bumped to 20 to keep buildbot happy
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)

        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'],
                                                  [10.2504], 4)
        passing &= self.assert_array_almost_equal(rdt['absolute_pressure'],
                                                  [14.8670], 4)

        return passing

    def check_rsn_instrument_data_product(self):
        passing = True
        # For RS03AXBS-MJ03A-06-PRESTA301 (PREST-A) there are a few listed data products:
        # Parsed, Engineering, SFLPRES_L0, SFLPRES_L1.
        # Check for the two data products and make sure they have the proper parameters;
        # every SFLPRES_L0 field (other than time) should carry the SFLPRES identifier.
        data_products, _ = self.RR.find_resources_ext(
            alt_id_ns='PRE',
            alt_id='RS03AXBS-MJ03A-06-PRESTA301_SFLPRES_L0_DPID',
            id_only=True)
        passing &= self.assertTrue(len(data_products) == 1)
        if not data_products:
            return passing

        data_product_id = data_products[0]

        stream_defs, _ = self.RR.find_objects(data_product_id,
                                              PRED.hasStreamDefinition,
                                              id_only=False)
        passing &= self.assertTrue(len(stream_defs) == 1)
        if not stream_defs:
            return passing

        # Assert that the stream definition has the correct reference designator
        stream_def = stream_defs[0]
        passing &= self.assertEquals(
            stream_def.stream_configuration['reference_designator'],
            'RS03AXBS-MJ03A-06-PRESTA301')

        # Get the pdict and make sure that the parameters corresponding to the available fields
        # begin with the appropriate data product identifier

        pdict_ids, _ = self.RR.find_objects(stream_def,
                                            PRED.hasParameterDictionary,
                                            id_only=True)
        passing &= self.assertEquals(len(pdict_ids), 1)
        if not pdict_ids:
            return passing

        pdict_id = pdict_ids[0]

        pdict = DatasetManagementService.get_parameter_dictionary(pdict_id)
        available_params = [
            pdict.get_context(i) for i in pdict.keys()
            if i in stream_def.available_fields
        ]
        for p in available_params:
            if p.name == 'time':  # Ignore the domain parameter
                continue
            passing &= self.assertTrue(p.ooi_short_name.startswith('SFLPRES'))
        passing &= self.check_presta_instrument_data_products(
            'RS01SLBS-MJ01A-06-PRESTA101')
        passing &= self.check_vel3d_instrument_data_products(
            'RS01SLBS-MJ01A-12-VEL3DB101')
        passing &= self.check_presta_instrument_data_products(
            'RS03AXBS-MJ03A-06-PRESTA301')
        passing &= self.check_vel3d_instrument_data_products(
            'RS03AXBS-MJ03A-12-VEL3DB301')
        passing &= self.check_tempsf_instrument_data_product(
            'RS03ASHS-MJ03B-07-TMPSFA301')
        passing &= self.check_vel3d_instrument_data_products(
            'RS03INT2-MJ03D-12-VEL3DB304')
        passing &= self.check_trhph_instrument_data_products(
            'RS03INT1-MJ03C-10-TRHPHA301')

        self.data_product_management.activate_data_product_persistence(
            data_product_id)
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(
            data_product_id)
        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'], [10.2504], 4)
        passing &= self.assert_array_almost_equal(rdt['absolute_pressure'], [14.8670], 4)
        self.data_product_management.suspend_data_product_persistence(
            data_product_id)  # Should do nothing and not raise anything

        return passing

    def check_glider(self):
        """
        Check that glider GP05MOAS-GL001 assembly is defined by OOI preload (3 instruments)
        """
        passing = True
        GP05MOAS_GL001_device = self.retrieve_ooi_asset('GP05MOAS-GL001_PD')
        child_devs, assns = self.RR.find_objects(
            subject=GP05MOAS_GL001_device._id,
            predicate=PRED.hasDevice,
            id_only=True)
        passing &= self.assertEquals(len(child_devs), 3)

        # Set GP05MOAS-GL001 Deployment to DEPLOYED
        GP05MOAS_GL001_deploy = self.retrieve_ooi_asset('GP05MOAS-GL001_DEP')
        passing &= self.transition_lcs_then_verify(
            resource_id=GP05MOAS_GL001_deploy._id,
            new_lcs_state=LCE.DEPLOY,
            verify='DEPLOYED')

        # Activate Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(GP05MOAS_GL001_deploy._id)

        # Deactivate Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(GP05MOAS_GL001_deploy._id)

        # Create a new Deployment resource X without any assignment
        x_deploy_id = self.create_basic_deployment(
            name='X_Deployment',
            description='new Deployment resource X without any assignment')

        # Assign Deployment X to site GP05MOAS-GL001
        GP05MOAS_GL001_psite = self.retrieve_ooi_asset('GP05MOAS-GL001')
        self.OMS.assign_site_to_deployment(GP05MOAS_GL001_psite._id,
                                           x_deploy_id)

        # Assign Deployment X to first device for GP05MOAS-GL001
        GP05MOAS_GL001_device = self.retrieve_ooi_asset('GP05MOAS-GL001_PD')
        self.OMS.assign_device_to_deployment(GP05MOAS_GL001_device._id,
                                             x_deploy_id)

        # Set GP05MOAS-GL001 Deployment to PLANNED state
        #self.transition_lcs_then_verify(resource_id=x_deploy_id, new_lcs_state=LCE.PLAN, verify='PLANNED')
        # ??? already in planned

        # Set second GP05MOAS-GL001 Deployment to DEPLOYED
        passing &= self.transition_lcs_then_verify(resource_id=x_deploy_id,
                                                   new_lcs_state=LCE.DEPLOY,
                                                   verify='DEPLOYED')
        self.dump_deployment(x_deploy_id)

        # Activate second Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(x_deploy_id)

        # Deactivate second Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(x_deploy_id)
        return passing

    def check_cg_assembly(self):
        passing = True

        # Set several CE01ISSM-RI002-* instrument devices to DEVELOPED state

        # Assemble several CE01ISSM-RI002-* instruments to a CG CE01ISSM-RI002 component platform

        # Set several CE01ISSM-RI002-* instrument devices to INTEGRATED state

        # Assemble CE01ISSM-RI002 platform to CG CE01ISSM-LM001 station platform

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to DEPLOYED state (children maybe too?)

        # Set CE01ISSM-LM001 Deployment to DEPLOYED

        # Activate CE01ISSM-LM001 platform assembly deployment

        # Deactivate CE01ISSM-LM001 platform assembly deployment

        # Set CE01ISSM-LM001 Deployment to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Disassemble CE01ISSM-RI002 platform from CG CE01ISSM-LM001 station platform

        # Disassemble all CE01ISSM-RI002-* instruments from a CG CE01ISSM-RI002 component platform

        # Retire instrument one for CE01ISSM-RI002-*

        # Retire device one for CE01ISSM-RI002

        # Retire device one for CE01ISSM-LM001

        return passing

    # -------------------------------------------------------------------------

    def retrieve_ooi_asset(self, alt_id='', namespace='PRE'):
        dp_list, _ = self.RR.find_resources_ext(alt_id_ns=namespace,
                                                alt_id=alt_id)
        self.assertEquals(len(dp_list), 1)
        return dp_list[0]

    def transition_lcs_then_verify(self, resource_id, new_lcs_state, verify):
        self.RR2.advance_lcs(resource_id, new_lcs_state)
        resource_obj = self.RR.read(resource_id)
        return self.assertEquals(resource_obj.lcstate, verify)

    def create_basic_deployment(self, name='', description=''):
        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name=name,
                                   description=description,
                                   context=IonObject(
                                       OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        return self.OMS.create_deployment(deployment_obj)

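    # Activating a deployment creates a hasDevice association from the site
    # to the device; deactivating removes it.  The two validators below check
    # for exactly that association.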
    def validate_deployment_activated(self, deployment_id=''):
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id,
                                           predicate=PRED.hasDevice,
                                           object=device_id)
        return self.assertEquals(len(assocs), 1)

    def validate_deployment_deactivated(self, deployment_id=''):
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id,
                                           predicate=PRED.hasDevice,
                                           object=device_id)
        return self.assertEquals(len(assocs), 0)

    def dump_deployment(self, deployment_id='', name=""):
        #site_id, device_id = self.get_deployment_ids(deployment_id)
        resource_list, _ = self.RR.find_subjects(predicate=PRED.hasDeployment,
                                                 object=deployment_id,
                                                 id_only=True)
        resource_list.append(deployment_id)
        resources = self.RR.read_mult(resource_list)
        log.debug('---------   dump_deployment %s summary---------------',
                  name)
        for resource in resources:
            log.debug('%s: %s (%s)', resource._get_type(), resource.name,
                      resource._id)

        log.debug('---------   dump_deployment %s full dump ---------------',
                  name)

        for resource in resources:
            log.debug('resource: %s ', resource)
        log.debug('---------   dump_deployment %s end  ---------------', name)

        #assocs = self.container.resource_registry.find_associations(anyside=deployment_id)
#        assocs = Container.instance.resource_registry.find_associations(anyside=deployment_id)
#        log.debug('---------   dump_deployment  ---------------')
#        for assoc in assocs:
#            log.debug('SUBJECT: %s      PREDICATE: %s OBJECT: %s', assoc.s, assoc.p, assoc.o)
#        log.debug('---------   dump_deployment  end  ---------------')

    def get_deployment_ids(self, deployment_id=''):
        devices = []
        sites = []
        idevice_list, _ = self.RR.find_subjects(RT.InstrumentDevice,
                                                PRED.hasDeployment,
                                                deployment_id,
                                                id_only=True)
        pdevice_list, _ = self.RR.find_subjects(RT.PlatformDevice,
                                                PRED.hasDeployment,
                                                deployment_id,
                                                id_only=True)
        devices = idevice_list + pdevice_list
        self.assertEquals(1, len(devices))
        isite_list, _ = self.RR.find_subjects(RT.InstrumentSite,
                                              PRED.hasDeployment,
                                              deployment_id,
                                              id_only=True)
        psite_list, _ = self.RR.find_subjects(RT.PlatformSite,
                                              PRED.hasDeployment,
                                              deployment_id,
                                              id_only=True)
        sites = isite_list + psite_list
        self.assertEquals(1, len(sites))
        return sites[0], devices[0]

    def _find_resource_in_list(self,
                               res_list,
                               attr,
                               attr_val,
                               assert_found=True):
        for res in res_list:
            v = getattr(res, attr, None)
            if v == attr_val:
                return res
        if assert_found:
            self.assertTrue(
                False,
                "Attribute %s value %s not found in list" % (attr, attr_val))
        return None

    # -------------------------------------------------------------------------

    def _get_caller(self):
        s = inspect.stack()
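        # stack()[2] is the frame record two levels up (past the assertion wrapper);
        # index [1] is that frame's file name and [2] its line number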
        return "%s:%s" % (s[2][1], s[2][2])

    @assertion_wrapper
    def assert_array_almost_equal(self, *args, **kwargs):
        np.testing.assert_array_almost_equal(*args, **kwargs)

    @assertion_wrapper
    def assertEquals(self, *args, **kwargs):
        IonIntegrationTestCase.assertEquals(self, *args, **kwargs)

    @assertion_wrapper
    def assertTrue(self, *args, **kwargs):
        IonIntegrationTestCase.assertTrue(self, *args, **kwargs)
示例#32
0
class GenericIntHelperTestCase(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.generic_int_helper_rr = ResourceRegistryServiceClient(
            node=self.container.node)
        self.generic_int_helper_rr2 = EnhancedResourceRegistryClient(
            self.generic_int_helper_rr)

    def assert_lcs_fail(self, owner_service, resource_label, resource_id,
                        lc_event):
        """
        execute an lcs event and verify that it fails

        @param owner_service instance of service client that will handle the request
        @param resource_label string like "instrument_device"
        @param resource_id string
        @param lc_event string like LCE.INTEGRATE
        """

        lcsmethod = getattr(owner_service,
                            "execute_%s_lifecycle" % resource_label)
        # temporarily disabled for beta testing; remove this early return to
        # restore the Unauthorized check below
        if True:
            log.warn("Skipping assert_lcs_fail for beta testing purposes")
            return
        self.assertRaises(Unauthorized, lcsmethod, resource_id, lc_event)

    def assert_lcs_pass(self, owner_service, resource_label, resource_id,
                        lc_event, lc_state):
        """
        execute an lcs event and verify that it passes and affects state

        @param owner_service instance of service client that will handle the request
        @param resource_label string like "instrument_device"
        @param resource_id string
        @param lc_event string like LCE.INTEGRATE
        @param lc_state string like LCS.INTEGRATED (where the state should end up)
        """

        lcsmethod = getattr(owner_service,
                            "execute_%s_lifecycle" % resource_label)
        readmethod = getattr(owner_service, "read_%s" % resource_label)

        lcsmethod(resource_id, lc_event)
        resource_obj = readmethod(resource_id)

        self.assertEqual(lc_state, resource_obj.lcstate)

    def perform_association_script(self, assign_obj_to_subj_fn, find_subj_fn,
                                   find_obj_fn, subj_id, obj_id):
        """
        create an association and test that it went properly

        @param assign_obj_to_subj_fn the service method that takes (obj, subj) and associates them
        @param find_subj_fn the service method that returns a list of subjects given an object
        @param find_obj_fn the service method that returns a list of objects given a subject
        @param subj_id the subject id to associate
        @param obj_id the object id to associate
        """
        initial_subj_count = len(find_subj_fn(obj_id))
        initial_obj_count = len(find_obj_fn(subj_id))

        log.debug("Creating association")
        if not ("str" == type(subj_id).__name__ == type(obj_id).__name__):
            raise NotImplementedError(
                "%s='%s' to %s='%s'" %
                (type(subj_id), str(subj_id), type(obj_id), str(obj_id)))
        if not (subj_id and obj_id):
            raise NotImplementedError(
                "%s='%s' to %s='%s'" %
                (type(subj_id), str(subj_id), type(obj_id), str(obj_id)))
        assign_obj_to_subj_fn(obj_id, subj_id)

        log.debug("Verifying find-subj-by-obj")
        subjects = find_subj_fn(obj_id)
        self.assertEqual(initial_subj_count + 1, len(subjects))
        subject_ids = []
        for x in subjects:
            if not "_id" in x:
                raise Inconsistent(
                    "'_id' field not found in resource! got: %s" % str(x))
            subject_ids.append(x._id)
        self.assertIn(subj_id, subject_ids)

        log.debug("Verifying find-obj-by-subj")
        objects = find_obj_fn(subj_id)
        self.assertEqual(initial_obj_count + 1, len(objects))
        object_ids = []
        for x in objects:
            if not "_id" in x:
                raise Inconsistent(
                    "'_id' field not found in resource! got: %s" % str(x))
            object_ids.append(x._id)
        self.assertIn(obj_id, object_ids)

    def perform_fd_script(self, resource_id, resource_label, owner_service):
        """
        delete a resource and check that it was properly deleted

        @param resource_id id to be deleted
        @param resource_label something like platform_model
        @param owner_service service client instance
        """

        del_op = getattr(owner_service, "force_delete_%s" % resource_label)

        del_op(resource_id)

        # try again to make sure that we get NotFound
        self.assertRaises(NotFound, del_op, resource_id)

    def perform_fcruf_script(self,
                             resource_iontype,
                             resource_label,
                             owner_service,
                             actual_obj=None,
                             extra_fn=None):
        """
        run through find, create, read, update, and find ops on a basic resource

        NO DELETE in here.

        @param resource_iontype something like RT.PlatformModel
        @param resource_label something like platform_model
        @param owner_service a service client instance like InstrumentManagementServiceClient
        @param actual_obj use this IonObject instead of a generic object for testing
        @param extra_fn a function to run after the script, taking the new resource id as input
        @return generic_id, the resource_id of the generic resource that was created
        """

        # This section just makes the LCA integration script easier to write:
        # each resource type goes through (essentially) the same steps, so we
        # set up a generic service-esque object (basically a nice package of
        # shortcuts): a fake service populated with the methods we need.

        some_service = DotDict()

        def fill(svc, method):
            """
            make a "shortcut service" for testing crud ops.
            @param svc a dotdict
            @param method the method name to add
            """

            realmethod = "%s_widget" % method

            setattr(svc, realmethod,
                    getattr(owner_service, "%s_%s" % (method, resource_label)))

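        # e.g. fill(some_service, "create") exposes
        # owner_service.create_<resource_label> as some_service.create_widget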
        fill(some_service, "create")
        fill(some_service, "read")
        fill(some_service, "update")
        fill(some_service, "delete")

        def find_widgets():
            ret, _ = self.generic_int_helper_rr.find_resources(
                resource_iontype, None, None, False)
            return ret

        #UX team: generic script for LCA resource operations begins here.
        # some_service will be replaced with whatever service you're calling
        # widget will be replaced with whatever resource you're working with
        # resource_label will be data_product or logical_instrument

        log.info("Finding %s objects", resource_label)
        num_objs = len(find_widgets())
        log.info("I found %d %s objects", num_objs, resource_label)

        generic_obj = actual_obj or any_old(resource_iontype)
        log.info("Creating a %s with name='%s'", resource_label,
                 generic_obj.name)
        generic_id = some_service.create_widget(generic_obj)
        self.assertIsNotNone(generic_id,
                             "%s failed its creation" % resource_iontype)

        log.info("Reading %s '%s'", resource_label, generic_id)
        generic_ret = some_service.read_widget(generic_id)

        log.info("Verifying equality of stored and retrieved object")
        self.assertEqual(generic_obj.name, generic_ret.name)
        self.assertEqual(generic_obj.description, generic_ret.description)

        log.info("Updating %s '%s'", resource_label, generic_id)
        generic_newname = "%s updated" % generic_ret.name
        generic_ret.name = generic_newname
        some_service.update_widget(generic_ret)

        log.info("Reading %s '%s' to verify update", resource_iontype,
                 generic_id)
        generic_ret = some_service.read_widget(generic_id)

        self.assertEqual(generic_newname, generic_ret.name)
        self.assertEqual(generic_obj.description, generic_ret.description)

        log.info("Finding %s objects... checking that there's a new one",
                 resource_iontype)
        num_objs2 = len(find_widgets())

        log.info("There were %s and now there are %s", num_objs, num_objs2)
        self.assertTrue(num_objs2 > num_objs)

        if extra_fn is not None:
            log.info("Running extra_fn on generic resource '%s'", generic_id)
            extra_fn(generic_id)

        log.info("Returning %s with id '%s'", resource_iontype, generic_id)
        return generic_id
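
    # --- usage sketch (hypothetical, for illustration only) -----------------
    # A concrete test could drive the generic CRUD script like this, assuming
    # an InstrumentManagementServiceClient is available as `ims`:
    #
    #   def test_platform_model_fcruf(self):
    #       ims = InstrumentManagementServiceClient()
    #       model_id = self.perform_fcruf_script(RT.PlatformModel,
    #                                            "platform_model", ims)
    #       self.perform_fd_script(model_id, "platform_model", ims)
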
class VisStreamLauncher(ImmediateProcess):
    """
    Class emulates a stream source from a NetCDF file. It emits a record of data every few seconds on a
    stream identified by a routing key.

    """


    def on_init(self):
        log.debug("VizStreamProducer init. Self.id=%s" % self.id)

    def on_start(self):

        log.debug("VizStreamProducer start")
        self.data_source_name = self.CFG.get('name')
        self.dataset = self.CFG.get('dataset')

        # create a pubsub client and a resource registry client
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)

        # Dummy instrument related clients
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.IngestClient = IngestionManagementServiceClient(node=self.container.node)

        # Disabled: wait for an existing ingestion configuration to become available
        """
        # Set up the preconditions. Look for an existing ingestion config
        while True:
            log.info("VisStreamLauncher:on_start: Waiting for an ingestion configuration to be available.")
            ingestion_cfgs, _ = self.rrclient.find_resources(RT.IngestionConfiguration, None, None, True)

            if len(ingestion_cfgs) > 0:
                break
            else:
                gevent.sleep(1)
        """


        # Check whether the data_product already exists in the system (e.g. when relaunching after a crash)
        dp_ids,_ = self.rrclient.find_resources(RT.DataProduct, None, self.data_source_name, True)
        if len(dp_ids) > 0:
            data_product_id = dp_ids[0]
            log.debug('Found existing data product id=%s', data_product_id)
        else:
            # Create InstrumentModel
            instModel_obj = IonObject(RT.InstrumentModel, name=self.data_source_name, description=self.data_source_name, model=self.data_source_name)
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)

            # Create InstrumentDevice
            instDevice_obj = IonObject(RT.InstrumentDevice, name=self.data_source_name, description=self.data_source_name, serial_number="12345" )

            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

            # create a stream definition for the data from the ctd simulator
            ctd_stream_def = SBE37_CDM_stream_definition()
            ctd_stream_def_id = self.pubsubclient.create_stream_definition(container=ctd_stream_def)

            log.debug('Creating new CDM data product with a stream definition')
            dp_obj = IonObject(RT.DataProduct,name=self.data_source_name,description='ctd stream test')
            data_product_id = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id)

            self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id)
            self.dpclient.activate_data_product_persistence(data_product_id=data_product_id, persist_data=True, persist_metadata=True)

            log.debug('Created new data product id=%s', data_product_id)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id, PRED.hasStream, None, True)

        if self.dataset == 'sinusoidal':
            pid = self.container.spawn_process(
                name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.sinusoidal_stream_publisher',
                cls='SinusoidalCtdPublisher',
                config={'process': {'stream_id': stream_ids[0]}})
        else:
            pid = self.container.spawn_process(
                name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.ctd_stream_publisher',
                cls='SimpleCtdPublisher',
                config={'process': {'stream_id': stream_ids[0]}})

    def on_quit(self):
        log.debug("VizStreamProducer quit")
示例#34
0
class PubsubManagementIntTest(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2deploy.yml")
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()

        self.queue_cleanup = list()
        self.exchange_cleanup = list()

    def tearDown(self):
        for queue in self.queue_cleanup:
            xn = self.container.ex_manager.create_xn_queue(queue)
            xn.delete()
        for exchange in self.exchange_cleanup:
            xp = self.container.ex_manager.create_xp(exchange)
            xp.delete()

    def test_stream_def_crud(self):

        # Test Creation
        pdict = DatasetManagementService.get_parameter_dictionary_by_name("ctd_parsed_param_dict")
        stream_definition_id = self.pubsub_management.create_stream_definition(
            "ctd parsed", parameter_dictionary_id=pdict.identifier
        )

        # Make sure there is an assoc
        self.assertTrue(
            self.resource_registry.find_associations(
                subject=stream_definition_id,
                predicate=PRED.hasParameterDictionary,
                object=pdict.identifier,
                id_only=True,
            )
        )

        # Test Reading
        stream_definition = self.pubsub_management.read_stream_definition(stream_definition_id)
        self.assertTrue(PubsubManagementService._compare_pdicts(pdict.dump(), stream_definition.parameter_dictionary))

        # Test Deleting
        self.pubsub_management.delete_stream_definition(stream_definition_id)
        self.assertFalse(
            self.resource_registry.find_associations(
                subject=stream_definition_id,
                predicate=PRED.hasParameterDictionary,
                object=pdict.identifier,
                id_only=True,
            )
        )

        # Test comparisons
        in_stream_definition_id = self.pubsub_management.create_stream_definition(
            "L0 products",
            parameter_dictionary_id=pdict.identifier,
            available_fields=["time", "temp", "conductivity", "pressure"],
        )
        self.addCleanup(self.pubsub_management.delete_stream_definition, in_stream_definition_id)

        out_stream_definition_id = in_stream_definition_id
        self.assertTrue(
            self.pubsub_management.compare_stream_definition(in_stream_definition_id, out_stream_definition_id)
        )
        self.assertTrue(
            self.pubsub_management.compatible_stream_definitions(in_stream_definition_id, out_stream_definition_id)
        )

        out_stream_definition_id = self.pubsub_management.create_stream_definition(
            "L2 Products", parameter_dictionary=pdict.identifier, available_fields=["time", "salinity", "density"]
        )
        self.addCleanup(self.pubsub_management.delete_stream_definition, out_stream_definition_id)
        self.assertFalse(
            self.pubsub_management.compare_stream_definition(in_stream_definition_id, out_stream_definition_id)
        )

        self.assertTrue(
            self.pubsub_management.compatible_stream_definitions(in_stream_definition_id, out_stream_definition_id)
        )

    def test_validate_stream_defs(self):

        # test no input
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = []
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_0", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_0", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        # test input with no output
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_1", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_1", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test available field missing parameter context definition -- missing PRESWAT_L0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["DENSITY"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_2", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_2", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        # test l1 from l0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_3", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_3", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test l2 from l0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1", "DENSITY", "PRACSAL"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_4", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_4", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test Ln from L0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_5", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_5", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test L2 from L1
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_6", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_6", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test L1 from L0 missing L0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon"])
        outgoing_pdict_id = self._get_pdict(["TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_7", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_7", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        # test L2 from L0 missing L0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_8", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_8", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        # test L2 from L0 missing L1
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_9", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_9", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

    def publish_on_stream(self, stream_id, msg):
        stream = self.pubsub_management.read_stream(stream_id)
        stream_route = stream.stream_route
        publisher = StandaloneStreamPublisher(stream_id=stream_id, stream_route=stream_route)
        publisher.publish(msg)

    def test_stream_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition("test_definition", stream_type="stream")
        topic_id = self.pubsub_management.create_topic(name="test_topic", exchange_point="test_exchange")
        self.exchange_cleanup.append("test_exchange")
        topic2_id = self.pubsub_management.create_topic(name="another_topic", exchange_point="outside")
        stream_id, route = self.pubsub_management.create_stream(
            name="test_stream",
            topic_ids=[topic_id, topic2_id],
            exchange_point="test_exchange",
            stream_definition_id=stream_def_id,
        )

        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertEquals(topics, [topic_id])

        defs, assocs = self.resource_registry.find_objects(
            subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True
        )
        self.assertTrue(len(defs))

        stream = self.pubsub_management.read_stream(stream_id)
        self.assertEquals(stream.name, "test_stream")
        self.pubsub_management.delete_stream(stream_id)

        with self.assertRaises(NotFound):
            self.pubsub_management.read_stream(stream_id)

        defs, assocs = self.resource_registry.find_objects(
            subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True
        )
        self.assertFalse(len(defs))

        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertFalse(len(topics))

        self.pubsub_management.delete_topic(topic_id)
        self.pubsub_management.delete_topic(topic2_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_subscription_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition("test_definition", stream_type="stream")
        stream_id, route = self.pubsub_management.create_stream(
            name="test_stream", exchange_point="test_exchange", stream_definition_id=stream_def_id
        )
        subscription_id = self.pubsub_management.create_subscription(
            name="test subscription", stream_ids=[stream_id], exchange_name="test_queue"
        )
        self.exchange_cleanup.append("test_exchange")

        subs, assocs = self.resource_registry.find_objects(
            subject=subscription_id, predicate=PRED.hasStream, id_only=True
        )
        self.assertEquals(subs, [stream_id])

        res, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="test_queue", id_only=True)
        self.assertEquals(len(res), 1)

        subs, assocs = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )
        self.assertEquals(subs[0], res[0])

        subscription = self.pubsub_management.read_subscription(subscription_id)
        self.assertEquals(subscription.exchange_name, "test_queue")

        self.pubsub_management.delete_subscription(subscription_id)

        subs, assocs = self.resource_registry.find_objects(
            subject=subscription_id, predicate=PRED.hasStream, id_only=True
        )
        self.assertFalse(len(subs))

        subs, assocs = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )
        self.assertFalse(len(subs))

        self.pubsub_management.delete_stream(stream_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_move_before_activate(self):
        stream_id, route = self.pubsub_management.create_stream(name="test_stream", exchange_point="test_xp")

        # --------------------------------------------------------------------------------
        # Test moving before activate
        # --------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription("first_queue", stream_ids=[stream_id])

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="first_queue", id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )
        self.assertEquals(xn_ids[0], subjects[0])

        self.pubsub_management.move_subscription(subscription_id, exchange_name="second_queue")

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="second_queue", id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )

        self.assertEquals(len(subjects), 1)
        self.assertEquals(subjects[0], xn_ids[0])

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_move_activated_subscription(self):

        stream_id, route = self.pubsub_management.create_stream(name="test_stream", exchange_point="test_xp")
        # --------------------------------------------------------------------------------
        # Test moving after activate
        # --------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription("first_queue", stream_ids=[stream_id])
        self.pubsub_management.activate_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="first_queue", id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )
        self.assertEquals(xn_ids[0], subjects[0])

        self.verified = Event()

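        # subscriber callback; args are assumed to be (message, stream_route, stream_id)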
        def verify(m, r, s):
            self.assertEquals(m, "verified")
            self.verified.set()

        subscriber = StandaloneStreamSubscriber("second_queue", verify)
        subscriber.start()

        self.pubsub_management.move_subscription(subscription_id, exchange_name="second_queue")

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="second_queue", id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )

        self.assertEquals(len(subjects), 1)
        self.assertEquals(subjects[0], xn_ids[0])

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish("verified")

        self.assertTrue(self.verified.wait(2))

        self.pubsub_management.deactivate_subscription(subscription_id)

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_queue_cleanup(self):
        stream_id, route = self.pubsub_management.create_stream("test_stream", "xp1")
        xn_objs, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="queue1")
        for xn_obj in xn_objs:
            xn = self.container.ex_manager.create_xn_queue(xn_obj.name)
            xn.delete()
        subscription_id = self.pubsub_management.create_subscription("queue1", stream_ids=[stream_id])
        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="queue1")
        self.assertEquals(len(xn_ids), 1)

        self.pubsub_management.delete_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="queue1")
        self.assertEquals(len(xn_ids), 0)

    def test_activation_and_deactivation(self):
        stream_id, route = self.pubsub_management.create_stream("stream1", "xp1")
        subscription_id = self.pubsub_management.create_subscription("sub1", stream_ids=[stream_id])

        self.check1 = Event()

        def verifier(m, r, s):
            self.check1.set()

        subscriber = StandaloneStreamSubscriber("sub1", verifier)
        subscriber.start()

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish("should not receive")

        self.assertFalse(self.check1.wait(0.25))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish("should receive")
        self.assertTrue(self.check1.wait(2))

        self.check1.clear()
        self.assertFalse(self.check1.is_set())

        self.pubsub_management.deactivate_subscription(subscription_id)

        publisher.publish("should not receive")
        self.assertFalse(self.check1.wait(0.5))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish("should receive")
        self.assertTrue(self.check1.wait(2))

        subscriber.stop()

        self.pubsub_management.deactivate_subscription(subscription_id)
        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_topic_crud(self):

        topic_id = self.pubsub_management.create_topic(name="test_topic", exchange_point="test_xp")
        self.exchange_cleanup.append("test_xp")

        topic = self.pubsub_management.read_topic(topic_id)

        self.assertEquals(topic.name, "test_topic")
        self.assertEquals(topic.exchange_point, "test_xp")

        self.pubsub_management.delete_topic(topic_id)
        with self.assertRaises(NotFound):
            self.pubsub_management.read_topic(topic_id)

    def test_full_pubsub(self):

        self.sub1_sat = Event()
        self.sub2_sat = Event()

        def subscriber1(m, r, s):
            self.sub1_sat.set()

        def subscriber2(m, r, s):
            self.sub2_sat.set()

        sub1 = StandaloneStreamSubscriber("sub1", subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

        sub2 = StandaloneStreamSubscriber("sub2", subscriber2)
        self.queue_cleanup.append(sub2.xn.queue)
        sub2.start()

        log_topic = self.pubsub_management.create_topic("instrument_logs", exchange_point="instruments")
        science_topic = self.pubsub_management.create_topic("science_data", exchange_point="instruments")
        events_topic = self.pubsub_management.create_topic("notifications", exchange_point="events")

        log_stream, route = self.pubsub_management.create_stream(
            "instrument1-logs", topic_ids=[log_topic], exchange_point="instruments"
        )
        ctd_stream, route = self.pubsub_management.create_stream(
            "instrument1-ctd", topic_ids=[science_topic], exchange_point="instruments"
        )
        event_stream, route = self.pubsub_management.create_stream(
            "notifications", topic_ids=[events_topic], exchange_point="events"
        )
        raw_stream, route = self.pubsub_management.create_stream("temp", exchange_point="global.data")
        self.exchange_cleanup.extend(["instruments", "events", "global.data"])

        subscription1 = self.pubsub_management.create_subscription(
            "subscription1", stream_ids=[log_stream, event_stream], exchange_name="sub1"
        )
        subscription2 = self.pubsub_management.create_subscription(
            "subscription2", exchange_points=["global.data"], stream_ids=[ctd_stream], exchange_name="sub2"
        )

        self.pubsub_management.activate_subscription(subscription1)
        self.pubsub_management.activate_subscription(subscription2)

        self.publish_on_stream(log_stream, 1)
        self.assertTrue(self.sub1_sat.wait(4))
        self.assertFalse(self.sub2_sat.is_set())

        self.publish_on_stream(raw_stream, 1)
        self.assertTrue(self.sub2_sat.wait(4))

        sub1.stop()
        sub2.stop()

    def test_topic_craziness(self):

        self.msg_queue = Queue()

        def subscriber1(m, r, s):
            self.msg_queue.put(m)

        sub1 = StandaloneStreamSubscriber("sub1", subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

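        # Tree 1 (xp1):         topic1
        #                      /      \
        #                 topic2      topic3
        #                /    \       /    \
        #           topic4  topic5 topic6  topic7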
        topic1 = self.pubsub_management.create_topic("topic1", exchange_point="xp1")
        topic2 = self.pubsub_management.create_topic("topic2", exchange_point="xp1", parent_topic_id=topic1)
        topic3 = self.pubsub_management.create_topic("topic3", exchange_point="xp1", parent_topic_id=topic1)
        topic4 = self.pubsub_management.create_topic("topic4", exchange_point="xp1", parent_topic_id=topic2)
        topic5 = self.pubsub_management.create_topic("topic5", exchange_point="xp1", parent_topic_id=topic2)
        topic6 = self.pubsub_management.create_topic("topic6", exchange_point="xp1", parent_topic_id=topic3)
        topic7 = self.pubsub_management.create_topic("topic7", exchange_point="xp1", parent_topic_id=topic3)

        # Tree 2
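        #   topic8 -> topic9 -> topic10
        #                    \-> topic11 -> topic12
        #                                \-> topic13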
        topic8 = self.pubsub_management.create_topic("topic8", exchange_point="xp2")
        topic9 = self.pubsub_management.create_topic("topic9", exchange_point="xp2", parent_topic_id=topic8)
        topic10 = self.pubsub_management.create_topic("topic10", exchange_point="xp2", parent_topic_id=topic9)
        topic11 = self.pubsub_management.create_topic("topic11", exchange_point="xp2", parent_topic_id=topic9)
        topic12 = self.pubsub_management.create_topic("topic12", exchange_point="xp2", parent_topic_id=topic11)
        topic13 = self.pubsub_management.create_topic("topic13", exchange_point="xp2", parent_topic_id=topic11)
        self.exchange_cleanup.extend(["xp1", "xp2"])

        stream1_id, route = self.pubsub_management.create_stream(
            "stream1", topic_ids=[topic7, topic4, topic5], exchange_point="xp1"
        )
        stream2_id, route = self.pubsub_management.create_stream("stream2", topic_ids=[topic8], exchange_point="xp2")
        stream3_id, route = self.pubsub_management.create_stream(
            "stream3", topic_ids=[topic10, topic13], exchange_point="xp2"
        )
        stream4_id, route = self.pubsub_management.create_stream("stream4", topic_ids=[topic9], exchange_point="xp2")
        stream5_id, route = self.pubsub_management.create_stream("stream5", topic_ids=[topic11], exchange_point="xp2")

        subscription1 = self.pubsub_management.create_subscription("sub1", topic_ids=[topic1])
        subscription2 = self.pubsub_management.create_subscription("sub2", topic_ids=[topic8], exchange_name="sub1")
        subscription3 = self.pubsub_management.create_subscription("sub3", topic_ids=[topic9], exchange_name="sub1")
        subscription4 = self.pubsub_management.create_subscription(
            "sub4", topic_ids=[topic10, topic13, topic11], exchange_name="sub1"
        )
        # --------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription1)

        self.publish_on_stream(stream1_id, 1)

        self.assertEquals(self.msg_queue.get(timeout=10), 1)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription1)
        self.pubsub_management.delete_subscription(subscription1)
        # --------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription2)

        self.publish_on_stream(stream2_id, 2)
        self.assertEquals(self.msg_queue.get(timeout=10), 2)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription2)
        self.pubsub_management.delete_subscription(subscription2)

        # --------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription3)

        self.publish_on_stream(stream2_id, 3)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream3_id, 4)
        self.assertEquals(self.msg_queue.get(timeout=10), 4)

        self.pubsub_management.deactivate_subscription(subscription3)
        self.pubsub_management.delete_subscription(subscription3)

        # --------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription4)

        self.publish_on_stream(stream4_id, 5)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream5_id, 6)
        self.assertEquals(self.msg_queue.get(timeout=10), 6)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.pubsub_management.deactivate_subscription(subscription4)
        self.pubsub_management.delete_subscription(subscription4)

        # --------------------------------------------------------------------------------
        sub1.stop()

        self.pubsub_management.delete_topic(topic13)
        self.pubsub_management.delete_topic(topic12)
        self.pubsub_management.delete_topic(topic11)
        self.pubsub_management.delete_topic(topic10)
        self.pubsub_management.delete_topic(topic9)
        self.pubsub_management.delete_topic(topic8)
        self.pubsub_management.delete_topic(topic7)
        self.pubsub_management.delete_topic(topic6)
        self.pubsub_management.delete_topic(topic5)
        self.pubsub_management.delete_topic(topic4)
        self.pubsub_management.delete_topic(topic3)
        self.pubsub_management.delete_topic(topic2)
        self.pubsub_management.delete_topic(topic1)

        self.pubsub_management.delete_stream(stream1_id)
        self.pubsub_management.delete_stream(stream2_id)
        self.pubsub_management.delete_stream(stream3_id)
        self.pubsub_management.delete_stream(stream4_id)
        self.pubsub_management.delete_stream(stream5_id)

    def _get_pdict(self, filter_values):
        t_ctxt = ParameterContext("time", param_type=QuantityType(value_encoding=np.dtype("int64")))
        t_ctxt.uom = "seconds since 01-01-1900"
        t_ctxt.fill_value = -9999
        t_ctxt_id = self.dataset_management.create_parameter_context(
            name="time", parameter_context=t_ctxt.dump(), parameter_type="quantity<int64>", unit_of_measure=t_ctxt.uom
        )

        lat_ctxt = ParameterContext("lat", param_type=ConstantType(QuantityType(value_encoding=np.dtype("float32"))))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = "degree_north"
        lat_ctxt.fill_value = -9999
        lat_ctxt_id = self.dataset_management.create_parameter_context(
            name="lat",
            parameter_context=lat_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=lat_ctxt.uom,
        )

        lon_ctxt = ParameterContext("lon", param_type=ConstantType(QuantityType(value_encoding=np.dtype("float32"))))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = "degree_east"
        lon_ctxt.fill_value = -9999
        lon_ctxt_id = self.dataset_management.create_parameter_context(
            name="lon",
            parameter_context=lon_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=lon_ctxt.uom,
        )

        temp_ctxt = ParameterContext("TEMPWAT_L0", param_type=QuantityType(value_encoding=np.dtype("float32")))
        temp_ctxt.uom = "deg_C"
        temp_ctxt.fill_value = -9999
        temp_ctxt_id = self.dataset_management.create_parameter_context(
            name="TEMPWAT_L0",
            parameter_context=temp_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=temp_ctxt.uom,
        )

        # Conductivity - values expected to be the decimal results of conversion from hex
        cond_ctxt = ParameterContext("CONDWAT_L0", param_type=QuantityType(value_encoding=np.dtype("float32")))
        cond_ctxt.uom = "S m-1"
        cond_ctxt.fill_value = -9999
        cond_ctxt_id = self.dataset_management.create_parameter_context(
            name="CONDWAT_L0",
            parameter_context=cond_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=cond_ctxt.uom,
        )

        # Pressure - values expected to be the decimal results of conversion from hex
        press_ctxt = ParameterContext("PRESWAT_L0", param_type=QuantityType(value_encoding=np.dtype("float32")))
        press_ctxt.uom = "dbar"
        press_ctxt.fill_value = -9999
        press_ctxt_id = self.dataset_management.create_parameter_context(
            name="PRESWAT_L0",
            parameter_context=press_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=press_ctxt.uom,
        )

        # TEMPWAT_L1 = (TEMPWAT_L0 / 10000) - 10
        tl1_func = "(TEMPWAT_L0 / 10000) - 10"
        tl1_pmap = {"TEMPWAT_L0": "TEMPWAT_L0"}
        func = NumexprFunction("TEMPWAT_L1", tl1_func, tl1_pmap)
        tempL1_ctxt = ParameterContext(
            "TEMPWAT_L1", param_type=ParameterFunctionType(function=func), variability=VariabilityEnum.TEMPORAL
        )
        tempL1_ctxt.uom = "deg_C"

        tempL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=tempL1_ctxt.name,
            parameter_context=tempL1_ctxt.dump(),
            parameter_type="pfunc",
            unit_of_measure=tempL1_ctxt.uom,
        )

        # CONDWAT_L1 = (CONDWAT_L0 / 100000) - 0.5
        cl1_func = "(CONDWAT_L0 / 100000) - 0.5"
        cl1_pmap = {"CONDWAT_L0": "CONDWAT_L0"}
        func = NumexprFunction("CONDWAT_L1", cl1_func, cl1_pmap)
        condL1_ctxt = ParameterContext(
            "CONDWAT_L1", param_type=ParameterFunctionType(function=func), variability=VariabilityEnum.TEMPORAL
        )
        condL1_ctxt.uom = "S m-1"
        condL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=condL1_ctxt.name,
            parameter_context=condL1_ctxt.dump(),
            parameter_type="pfunc",
            unit_of_measure=condL1_ctxt.uom,
        )

        # Equation uses p_range, which is a calibration coefficient - Fixing to 679.34040721
        #   PRESWAT_L1 = (PRESWAT_L0 * p_range / (0.85 * 65536)) - (0.05 * p_range)
        pl1_func = "(PRESWAT_L0 * 679.34040721 / (0.85 * 65536)) - (0.05 * 679.34040721)"
        pl1_pmap = {"PRESWAT_L0": "PRESWAT_L0"}
        func = NumexprFunction("PRESWAT_L1", pl1_func, pl1_pmap)
        presL1_ctxt = ParameterContext(
            "PRESWAT_L1", param_type=ParameterFunctionType(function=func), variability=VariabilityEnum.TEMPORAL
        )
        presL1_ctxt.uom = "S m-1"
        presL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=presL1_ctxt.name,
            parameter_context=presL1_ctxt.dump(),
            parameter_type="pfunc",
            unit_of_measure=presL1_ctxt.uom,
        )

        # Density & practical salinity calucluated using the Gibbs Seawater library - available via python-gsw project:
        #       https://code.google.com/p/python-gsw/ & http://pypi.python.org/pypi/gsw/3.0.1

        # PRACSAL = gsw.SP_from_C((CONDWAT_L1 * 10), TEMPWAT_L1, PRESWAT_L1)
        owner = "gsw"
        sal_func = "SP_from_C"
        sal_arglist = [NumexprFunction("CONDWAT_L1*10", "C*10", {"C": "CONDWAT_L1"}), "TEMPWAT_L1", "PRESWAT_L1"]
        sal_kwargmap = None
        func = PythonFunction("PRACSAL", owner, sal_func, sal_arglist, sal_kwargmap)
        sal_ctxt = ParameterContext(
            "PRACSAL", param_type=ParameterFunctionType(func), variability=VariabilityEnum.TEMPORAL
        )
        sal_ctxt.uom = "g kg-1"

        sal_ctxt_id = self.dataset_management.create_parameter_context(
            name=sal_ctxt.name, parameter_context=sal_ctxt.dump(), parameter_type="pfunc", unit_of_measure=sal_ctxt.uom
        )

        # absolute_salinity = gsw.SA_from_SP(PRACSAL, PRESWAT_L1, longitude, latitude)
        # conservative_temperature = gsw.CT_from_t(absolute_salinity, TEMPWAT_L1, PRESWAT_L1)
        # DENSITY = gsw.rho(absolute_salinity, conservative_temperature, PRESWAT_L1)
        owner = "gsw"
        abs_sal_func = PythonFunction("abs_sal", owner, "SA_from_SP", ["PRACSAL", "PRESWAT_L1", "lon", "lat"], None)
        # abs_sal_func = PythonFunction('abs_sal', owner, 'SA_from_SP', ['lon','lat'], None)
        cons_temp_func = PythonFunction(
            "cons_temp", owner, "CT_from_t", [abs_sal_func, "TEMPWAT_L1", "PRESWAT_L1"], None
        )
        dens_func = PythonFunction("DENSITY", owner, "rho", [abs_sal_func, cons_temp_func, "PRESWAT_L1"], None)
        dens_ctxt = ParameterContext(
            "DENSITY", param_type=ParameterFunctionType(dens_func), variability=VariabilityEnum.TEMPORAL
        )
        dens_ctxt.uom = "kg m-3"

        dens_ctxt_id = self.dataset_management.create_parameter_context(
            name=dens_ctxt.name,
            parameter_context=dens_ctxt.dump(),
            parameter_type="pfunc",
            unit_of_measure=dens_ctxt.uom,
        )

        ids = [
            t_ctxt_id,
            lat_ctxt_id,
            lon_ctxt_id,
            temp_ctxt_id,
            cond_ctxt_id,
            press_ctxt_id,
            tempL1_ctxt_id,
            condL1_ctxt_id,
            presL1_ctxt_id,
            sal_ctxt_id,
            dens_ctxt_id,
        ]
        contexts = [
            t_ctxt,
            lat_ctxt,
            lon_ctxt,
            temp_ctxt,
            cond_ctxt,
            press_ctxt,
            tempL1_ctxt,
            condL1_ctxt,
            presL1_ctxt,
            sal_ctxt,
            dens_ctxt,
        ]
        context_ids = [ids[i] for i, ctxt in enumerate(contexts) if ctxt.name in filter_values]
        pdict_name = "_".join([ctxt.name for ctxt in contexts if ctxt.name in filter_values])

        pdict_id = self.dataset_management.create_parameter_dictionary(
            pdict_name, parameter_context_ids=context_ids, temporal_context="time"
        )
        return pdict_id

class DirectCoverageAccess(object):
    def __init__(self):
        self.ingestion_management = IngestionManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self._paused_streams = []
        self._w_covs = {}
        self._ro_covs = {}

        self._context_managed = False

    def __enter__(self):
        self._context_managed = True
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.clean_up()

    def clean_up(self, ro_covs=False, w_covs=False, streams=False):
        if not ro_covs and not w_covs and not streams:
            ro_covs = w_covs = streams = True

        if ro_covs:
            # Close any open read-only coverages
            for dsid, c in self._ro_covs.iteritems():
                c.close()

        if w_covs:
            # Close any open write coverages
            for sid, c in self._w_covs.iteritems():
                c.close()

        if streams:
            # Resume any paused ingestion workers
            for s in self._paused_streams:
                self.resume_ingestion(s)

    def get_ingestion_config(self):
        '''
        Grab the ingestion configuration from the resource registry
        '''
        # The ingestion configuration should have been created by the bootstrap service
        # which is configured through r2deploy.yml

        ingest_configs, _ = self.resource_registry.find_resources(restype=RT.IngestionConfiguration, id_only=True)
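        # assumes the bootstrap created at least one configuration; raises IndexError otherwise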
        return ingest_configs[0]

    def get_coverage_path(self, dataset_id):
        pth = DatasetManagementService._get_coverage_path(dataset_id)
        if not os.path.exists(pth):
            raise ValueError('Coverage with id \'{0}\' does not exist!'.format(dataset_id))

        return pth

    def pause_ingestion(self, stream_id):
        if not self._context_managed:
            warn_user('Warning: Pausing ingestion when not using a context manager is potentially unsafe - '
                           'be sure to resume ingestion for all streams by calling self.clean_up(streams=True)')

        if stream_id not in self._paused_streams:
            self.ingestion_management.pause_data_stream(stream_id, self.get_ingestion_config())
            self._paused_streams.append(stream_id)

    def resume_ingestion(self, stream_id):
        if stream_id in self._paused_streams:
            self.ingestion_management.resume_data_stream(stream_id, self.get_ingestion_config())
            self._paused_streams.remove(stream_id)

    def get_stream_id(self, dataset_id):
        sid, _ = self.resource_registry.find_objects(dataset_id, predicate=PRED.hasStream, id_only=True)
        return sid[0] if len(sid) > 0 else None

    def get_dataset_object(self, dataset_id):
        return self.dataset_management.read_dataset(dataset_id=dataset_id)

    def get_data_product_object(self, data_product_id):
        return self.data_product_management.read_data_product(data_product_id=data_product_id)

    def get_read_only_coverage(self, dataset_id):
        if not self._context_managed:
            warn_user('Warning: Coverages will remain open until they are closed or go out of scope - '
                           'be sure to close coverage instances when you are finished working with them or call self.clean_up(ro_covs=True)')

        # Check if we already have the coverage
        if dataset_id in self._ro_covs:
            cov = self._ro_covs[dataset_id]
            # If it's not closed, return it
            if not cov.closed:
                return cov
            # Otherwise, remove it from self._ro_covs and carry on
            del self._ro_covs[dataset_id]

        self._ro_covs[dataset_id] = DatasetManagementService._get_coverage(dataset_id, mode='r')

        return self._ro_covs[dataset_id]

    def get_editable_coverage(self, dataset_id):
        sid = self.get_stream_id(dataset_id)

        # Check if we already have the coverage
        if sid in self._w_covs:
            cov = self._w_covs[sid]
            # If it's not closed, return it
            if not cov.closed:
                return cov
            # Otherwise, remove it from self._w_covs and carry on
            del self._w_covs[sid]

        self.pause_ingestion(sid)
        if not self._context_managed:
            warn_user('Warning: Coverages will remain open until they are closed or go out of scope - '
                           'be sure to close coverage instances when you are finished working with them or call self.clean_up(w_covs=True)')
        try:
            self._w_covs[sid] = DatasetManagementService._get_simplex_coverage(dataset_id, mode='w')
            return self._w_covs[sid]
        except:
            self.resume_ingestion(sid)
            raise
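
    # Hedged sketch of the intended edit cycle; the parameter name, values and
    # slice are illustrative only (see manual_upload below for a real caller):
    #
    #     with helper.get_editable_coverage(dataset_id) as cov:
    #         cov.set_parameter_values('temp', values, slice_)
    #     # ingestion for the stream stays paused until clean_up(streams=True)
    #     # or resume_ingestion(stream_id) is called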

    @classmethod
    def get_parser(cls, data_file_path, config_path=None):
        return SimpleDelimitedParser.get_parser(data_file_path, config_path=config_path)

    def manual_upload(self, dataset_id, data_file_path, config_path=None):
        # First, ensure we can get a parser and parse the data file
        parser = self.get_parser(data_file_path, config_path)
        dat = parser.parse()

        # Get the coverage
        with self.get_editable_coverage(dataset_id) as cov:
            # Find the indices for the times in the data file
            try:
                time_dat = dat[cov.temporal_parameter_name]
            except ValueError, ve:
                if ve.message == 'field named %s not found.' % cov.temporal_parameter_name:
                    raise ValueError('Temporal parameter name {0} not in upload data'.format(cov.temporal_parameter_name))
                else:
                    raise
            cov_times = cov.get_time_values()
            tinds = [utils.find_nearest_index(cov_times, ti) for ti in time_dat]

            sl = (tinds,)
            cparams = cov.list_parameters()
            for n in dat.dtype.names:
                if n != cov.temporal_parameter_name:
                    if n in cparams:
                        cov.set_parameter_values(n, dat[n], sl)
                    else:
                        warn_user('Skipping column \'%s\': matching parameter not found in coverage!' % n)
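
# Hedged usage sketch for manual_upload (the dataset id and paths are
# hypothetical):
#
#     helper.manual_upload('dataset_id', '/tmp/data.csv',
#                          config_path='/tmp/parser_config.yml')
#
# Columns whose names match coverage parameters are written at the indices of
# the nearest coverage timestamps; unmatched columns are skipped with a warning.
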
class TestResourceRegistry(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create a client to the resource registry service
        self.resource_registry_service = ResourceRegistryServiceClient()

    @unittest.skip('Represents a bug in storage/retrieval')
    def test_tuple_in_dict(self):
        # create a resource with a tuple saved in a dict
        transform_obj = IonObject(RT.Transform)
        transform_obj.configuration = {}
        transform_obj.configuration["tuple"] = ('STRING', )
        transform_id, _ = self.resource_registry_service.create(transform_obj)

        # read the resource back
        returned_transform_obj = self.resource_registry_service.read(
            transform_id)

        self.assertEqual(transform_obj.configuration["tuple"],
                         returned_transform_obj.configuration["tuple"])

    def test_basics(self):
        # Sequence all the tests so that we can save numerous system starts and stops
        self._do_test_crud()
        self._do_test_read_mult()
        self._do_test_lifecycle()
        self._do_test_attach()
        self._do_test_association()
        self._do_test_find_resources()
        self._do_test_find_objects_mult()

    def _do_test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(
            cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(
            inst, headers={'ion-actor-id': str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid,
                                                             PRED.hasOwner,
                                                             RT.ActorIdentity,
                                                             id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def _do_test_read_mult(self):
        test_resource1_id, _ = self.resource_registry_service.create(
            Resource(name='test1'))
        test_resource2_id, _ = self.resource_registry_service.create(
            Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o, Resource)
            self.assertTrue(o._id in res_list)

    def _do_test_lifecycle(self):
        # Lifecycle tests
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT)
        self.assertEquals(att1.availability, AS.PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.PLAN)
        self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE))

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)
        self.assertEquals(att2.availability, AS.PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(
                rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.DEVELOP)
        self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE))

        with self.assertRaises(BadRequest):
            self.resource_registry_service.execute_lifecycle_transition(
                resource_id=rid, transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(
            rid, lcstate(LCS.INTEGRATED, AS.PRIVATE))
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED)
        self.assertEquals(att1.availability, AS.PRIVATE)

    def _do_test_attach(self):
        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Create a resource to attach to
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(
            aid1, include_content=True)
        self.assertEquals(binary, att1.content)

        import base64
        att = Attachment(content=base64.encodestring(binary),
                         attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(
            aid2, include_content=True)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(
            iid, id_only=False, include_content=True, limit=1)
        self.assertEquals(atts[0].content, binary)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [])

    def _do_test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(
            actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(
            actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(
            user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(
            user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                "bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message.startswith("Subject id"))

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message.startswith("Object id"))

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message ==
                        "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(
            cm.exception.message ==
            "Illegal object type ActorIdentity for predicate hasInfo")

        # Create an association between the subject and object
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None,
            True)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(
            RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(
            RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        assoc = self.resource_registry_service.get_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def _do_test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(
                RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(
            cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(
            RT.UserInfo, None, "name", False)
        self.assertEquals(len(ret[0]), 0)

        # Instantiate an object
        obj = IonObject("InstrumentAgentInstance", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentAgentInstance, None, "name", False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

    def _do_test_find_objects_mult(self):
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(
            subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(
            subject=transform_id,
            object=pd_id,
            predicate=PRED.hasProcessDefinition)

        results, _ = self.resource_registry_service.find_objects_mult(
            subjects=[dp_id], id_only=True)
        self.assertTrue(results == [transform_id])

        results, _ = self.resource_registry_service.find_objects_mult(
            subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @attr('EXT')
    def test_get_resource_extension(self):

        #Testing multiple instrument owners
        subject1 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        actor_identity_obj1 = IonObject(RT.ActorIdentity, {"name": subject1})
        actor_id1, _ = self.resource_registry_service.create(
            actor_identity_obj1)

        user_info_obj1 = IonObject(RT.UserInfo, {"name": "Foo"})
        user_info_id1, _ = self.resource_registry_service.create(
            user_info_obj1)
        self.resource_registry_service.create_association(
            actor_id1, PRED.hasInfo, user_info_id1)

        subject2 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256"

        actor_identity_obj2 = IonObject(RT.ActorIdentity, {"name": subject2})
        actor_id2, _ = self.resource_registry_service.create(
            actor_identity_obj2)

        user_info_obj2 = IonObject(RT.UserInfo, {"name": "Foo2"})
        user_info_id2, _ = self.resource_registry_service.create(
            user_info_obj2)
        self.resource_registry_service.create_association(
            actor_id2, PRED.hasInfo, user_info_id2)

        test_obj = IonObject(RT.InformationResource, {"name": "TestResource"})
        test_obj_id, _ = self.resource_registry_service.create(test_obj)
        self.resource_registry_service.create_association(
            test_obj_id, PRED.hasOwner, actor_id1)
        self.resource_registry_service.create_association(
            test_obj_id, PRED.hasOwner, actor_id2)

        extended_resource = self.resource_registry_service.get_resource_extension(
            test_obj_id, OT.ExtendedInformationResource)

        self.assertEqual(test_obj_id, extended_resource._id)
        self.assertEqual(len(extended_resource.owners), 2)

        extended_resource_list = self.resource_registry_service.get_resource_extension(
            str([user_info_id1, user_info_id2]),
            OT.ExtendedInformationResource)
        self.assertEqual(len(extended_resource_list), 2)

        optional_args = {'user_id': user_info_id1}
        extended_resource = self.resource_registry_service.get_resource_extension(
            test_obj_id,
            OT.TestExtendedInformationResource,
            optional_args=optional_args)

        self.assertEqual(test_obj_id, extended_resource._id)
        self.assertEqual(len(extended_resource.owners), 2)
        self.assertEqual(extended_resource.user_id, user_info_id1)

    @attr('PREP')
    def test_prepare_resource_support(self):

        prepare_data = self.resource_registry_service.prepare_resource_support(
            resource_type=RT.StreamDefinition)

        self.assertEqual(prepare_data.create_request.service_name,
                         "resource_registry")
        self.assertEqual(prepare_data.create_request.service_operation,
                         "create")
        self.assertEqual(prepare_data.create_request.request_parameters,
                         {"object": "$(object)"})

        self.assertEqual(prepare_data.update_request.service_name,
                         "resource_registry")
        self.assertEqual(prepare_data.update_request.service_operation,
                         "update")
        self.assertEqual(prepare_data.update_request.request_parameters,
                         {"object": "$(object)"})

        res_id, _ = self.resource_registry_service.create(
            prepare_data.resource)

        prepare_data = self.resource_registry_service.prepare_resource_support(
            resource_type=RT.StreamDefinition, resource_id=res_id)

        prepare_data.resource.name = "test_stream_def"
        prepare_data.resource.stream_type = "test_type"

        stream_def_id, _ = self.resource_registry_service.update(
            prepare_data.resource)

        #def ion_object_encoder(obj):
        #    return obj.__dict__

        #print simplejson.dumps(prepare_data, default=ion_object_encoder, indent=2)

        stream_def = self.resource_registry_service.read(stream_def_id)

        self.assertEqual(stream_def.name, prepare_data.resource.name)
        self.assertEqual(stream_def.stream_type,
                         prepare_data.resource.stream_type)

class NotificationWorker(TransformEventListener):
    """
    Instances of this class acts as a Notification Worker.
    """
    def on_init(self):
        self.user_info = {}
        self.resource_registry = ResourceRegistryServiceClient()
        self.q = gevent.queue.Queue()

        super(NotificationWorker, self).on_init()

    def test_hook(self, user_info, reverse_user_info):
        '''
        This method exists only to facilitate testing that the user_info dictionary is reloaded
        '''
        self.q.put((user_info, reverse_user_info))
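
    # In a test (hedged sketch), a reload can be observed by draining the queue
    # after a ReloadUserInfoEvent has been handled:
    #
    #     user_info, reverse_user_info = worker.q.get(timeout=10)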

    def on_start(self):
        super(NotificationWorker,self).on_start()

        self.reverse_user_info = None
        self.user_info = None

        #------------------------------------------------------------------------------------
        # Start by loading the user info and reverse user info dictionaries
        #------------------------------------------------------------------------------------

        try:
            self.user_info = self.load_user_info()
            self.reverse_user_info = calculate_reverse_user_info(self.user_info)

            log.debug("On start up, notification workers loaded the following user_info dictionary: %s" % self.user_info)
            log.debug("The calculated reverse user info: %s" % self.reverse_user_info )

        except NotFound as exc:
            if exc.message.find('users_index') > -1:
                log.warning("Notification workers found on start up that users_index have not been loaded yet.")
            else:
                raise NotFound(exc.message)

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            '''
            Callback method for the subscriber to ReloadUserInfoEvent
            '''

            notification_id = event_msg.notification_id
            log.debug("Notification worker received a ReloadUserInfoEvent. The relevant notification_id is %s" % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info = calculate_reverse_user_info(self.user_info)
            self.test_hook(self.user_info, self.reverse_user_info)

            log.debug("After a reload, the user_info: %s" % self.user_info)
            log.debug("The recalculated reverse_user_info: %s" % self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type="ReloadUserInfoEvent",
            origin='UserNotificationService',
            callback=reload_user_info
        )
        self.reload_user_info_subscriber.start()


    def process_event(self, msg, headers):
        """
        Callback method for the subscriber listening for all events
        """
        #------------------------------------------------------------------------------------
        # From the reverse user info dict find out which users have subscribed to that event
        #------------------------------------------------------------------------------------

        user_ids = []
        if self.reverse_user_info:
            log.debug("Notification worker checking for users interested in %s" % msg.type_)
            user_ids = check_user_notification_interest(event = msg, reverse_user_info = self.reverse_user_info)

        log.debug("Notification worker deduced the following users were interested in the event: %s, event_type: %s, origin: %s" % (user_ids, msg.type_, msg.origin ))
        #------------------------------------------------------------------------------------
        # Send email to the users
        #------------------------------------------------------------------------------------
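        # Note: a fresh SMTP client is set up (and quit) for every recipient,
        # so each interested user receives one email per event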

        for user_id in user_ids:
            msg_recipient = self.user_info[user_id]['user_contact'].email
            self.smtp_client = setting_up_smtp_client()
            send_email(message = msg, msg_recipient = msg_recipient, smtp_client = self.smtp_client )
            self.smtp_client.quit()

    def on_stop(self):
        # close subscribers safely
        self.reload_user_info_subscriber.stop()

        super(NotificationWorker, self).on_stop()

    def on_quit(self):
        # close subscribers safely
        self.reload_user_info_subscriber.stop()

        super(NotificationWorker, self).on_quit()

    def load_user_info(self):
        '''
        Method to load the user info dictionary used by the notification workers and the UNS

        @retval user_info dict
        '''

        users, _ = self.resource_registry.find_resources(restype= RT.UserInfo)

        user_info = {}

        if not users:
            return {}

        for user in users:
            notifications = []
            notification_preferences = None
            for variable in user.variables:
                if variable['name'] == 'notifications':
                    notifications = variable['value']

                if variable['name'] == 'notification_preferences':
                    notification_preferences = variable['value']

            user_info[user._id] = { 'user_contact' : user.contact, 'notifications' : notifications, 'notification_preferences' : notification_preferences}

        return user_info
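
    # Shape of the returned dictionary (illustrative):
    #
    #     { '<user_id>': { 'user_contact': <contact object>,
    #                      'notifications': [...],
    #                      'notification_preferences': ... } }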
Example #38
class PubsubManagementIntTest(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()

        self.pdicts = {}
        self.queue_cleanup = list()
        self.exchange_cleanup = list()

    def tearDown(self):
        for queue in self.queue_cleanup:
            xn = self.container.ex_manager.create_xn_queue(queue)
            xn.delete()
        for exchange in self.exchange_cleanup:
            xp = self.container.ex_manager.create_xp(exchange)
            xp.delete()

    def test_stream_def_crud(self):

        # Test Creation
        pdict = DatasetManagementService.get_parameter_dictionary_by_name(
            'ctd_parsed_param_dict')
        stream_definition_id = self.pubsub_management.create_stream_definition(
            'ctd parsed', parameter_dictionary_id=pdict.identifier)

        # Make sure there is an assoc
        self.assertTrue(
            self.resource_registry.find_associations(
                subject=stream_definition_id,
                predicate=PRED.hasParameterDictionary,
                object=pdict.identifier,
                id_only=True))

        # Test Reading
        stream_definition = self.pubsub_management.read_stream_definition(
            stream_definition_id)
        self.assertTrue(
            PubsubManagementService._compare_pdicts(
                pdict.dump(), stream_definition.parameter_dictionary))

        # Test Deleting
        self.pubsub_management.delete_stream_definition(stream_definition_id)
        self.assertFalse(
            self.resource_registry.find_associations(
                subject=stream_definition_id,
                predicate=PRED.hasParameterDictionary,
                object=pdict.identifier,
                id_only=True))

        # Test comparisons
        in_stream_definition_id = self.pubsub_management.create_stream_definition(
            'L0 products',
            parameter_dictionary_id=pdict.identifier,
            available_fields=['time', 'temp', 'conductivity', 'pressure'])
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        in_stream_definition_id)

        out_stream_definition_id = in_stream_definition_id
        self.assertTrue(
            self.pubsub_management.compare_stream_definition(
                in_stream_definition_id, out_stream_definition_id))
        self.assertTrue(
            self.pubsub_management.compatible_stream_definitions(
                in_stream_definition_id, out_stream_definition_id))

        out_stream_definition_id = self.pubsub_management.create_stream_definition(
            'L2 Products',
            parameter_dictionary_id=pdict.identifier,
            available_fields=['time', 'salinity', 'density'])
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        out_stream_definition_id)
        self.assertFalse(
            self.pubsub_management.compare_stream_definition(
                in_stream_definition_id, out_stream_definition_id))

        self.assertTrue(
            self.pubsub_management.compatible_stream_definitions(
                in_stream_definition_id, out_stream_definition_id))

    def test_validate_stream_defs(self):
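        # Each scenario below builds an incoming and an outgoing stream
        # definition, then checks whether validate_stream_defs accepts the
        # pairing (True when the outgoing available fields are derivable from
        # the incoming ones, False otherwise)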
        #test no input
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = []
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_0',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_0',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test input with no output
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_1',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_1',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test available field missing parameter context definition -- missing PRESWAT_L0
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = ['DENSITY']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_2',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_2',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test l1 from l0
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_3',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_3',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test l2 from l0
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1', 'DENSITY', 'PRACSAL'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_4',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_4',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test Ln from L0
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = [
            'DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'
        ]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_5',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_5',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test L2 from L1
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'
        ]
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_6',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_6',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test L1 from L0 missing L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON'])
        outgoing_pdict_id = self._get_pdict(
            ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_7',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_7',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test L2 from L0 missing L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_8',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_8',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test L2 from L0 missing L1
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_9',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_9',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

    def publish_on_stream(self, stream_id, msg):
        stream = self.pubsub_management.read_stream(stream_id)
        stream_route = stream.stream_route
        publisher = StandaloneStreamPublisher(stream_id=stream_id,
                                              stream_route=stream_route)
        publisher.publish(msg)

    def test_stream_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition(
            'test_definition', stream_type='stream')
        topic_id = self.pubsub_management.create_topic(
            name='test_topic', exchange_point='test_exchange')
        self.exchange_cleanup.append('test_exchange')
        topic2_id = self.pubsub_management.create_topic(
            name='another_topic', exchange_point='outside')
        stream_id, route = self.pubsub_management.create_stream(
            name='test_stream',
            topic_ids=[topic_id, topic2_id],
            exchange_point='test_exchange',
            stream_definition_id=stream_def_id)

        topics, assocs = self.resource_registry.find_objects(
            subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertEquals(topics, [topic_id])

        defs, assocs = self.resource_registry.find_objects(
            subject=stream_id,
            predicate=PRED.hasStreamDefinition,
            id_only=True)
        self.assertTrue(len(defs))

        stream = self.pubsub_management.read_stream(stream_id)
        self.assertEquals(stream.name, 'test_stream')
        self.pubsub_management.delete_stream(stream_id)

        with self.assertRaises(NotFound):
            self.pubsub_management.read_stream(stream_id)

        defs, assocs = self.resource_registry.find_objects(
            subject=stream_id,
            predicate=PRED.hasStreamDefinition,
            id_only=True)
        self.assertFalse(len(defs))

        topics, assocs = self.resource_registry.find_objects(
            subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertFalse(len(topics))

        self.pubsub_management.delete_topic(topic_id)
        self.pubsub_management.delete_topic(topic2_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_subscription_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition(
            'test_definition', stream_type='stream')
        stream_id, route = self.pubsub_management.create_stream(
            name='test_stream',
            exchange_point='test_exchange',
            stream_definition_id=stream_def_id)
        subscription_id = self.pubsub_management.create_subscription(
            name='test subscription',
            stream_ids=[stream_id],
            exchange_name='test_queue')
        self.exchange_cleanup.append('test_exchange')

        subs, assocs = self.resource_registry.find_objects(
            subject=subscription_id, predicate=PRED.hasStream, id_only=True)
        self.assertEquals(subs, [stream_id])

        res, _ = self.resource_registry.find_resources(restype=RT.ExchangeName,
                                                       name='test_queue',
                                                       id_only=True)
        self.assertEquals(len(res), 1)

        subs, assocs = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)
        self.assertEquals(subs[0], res[0])

        subscription = self.pubsub_management.read_subscription(
            subscription_id)
        self.assertEquals(subscription.exchange_name, 'test_queue')

        self.pubsub_management.delete_subscription(subscription_id)

        subs, assocs = self.resource_registry.find_objects(
            subject=subscription_id, predicate=PRED.hasStream, id_only=True)
        self.assertFalse(len(subs))

        subs, assocs = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)
        self.assertFalse(len(subs))

        self.pubsub_management.delete_stream(stream_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_move_before_activate(self):
        stream_id, route = self.pubsub_management.create_stream(
            name='test_stream', exchange_point='test_xp')

        #--------------------------------------------------------------------------------
        # Test moving before activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription(
            'first_queue', stream_ids=[stream_id])

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.pubsub_management.move_subscription(subscription_id,
                                                 exchange_name='second_queue')

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)

        self.assertEquals(len(subjects), 1)
        self.assertEquals(subjects[0], xn_ids[0])

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_move_activated_subscription(self):

        stream_id, route = self.pubsub_management.create_stream(
            name='test_stream', exchange_point='test_xp')
        #--------------------------------------------------------------------------------
        # Test moving after activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription(
            'first_queue', stream_ids=[stream_id])
        self.pubsub_management.activate_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.verified = Event()

        def verify(m, r, s):
            self.assertEquals(m, 'verified')
            self.verified.set()

        subscriber = StandaloneStreamSubscriber('second_queue', verify)
        subscriber.start()

        self.pubsub_management.move_subscription(subscription_id,
                                                 exchange_name='second_queue')

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)

        self.assertEquals(len(subjects), 1)
        self.assertEquals(subjects[0], xn_ids[0])

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish('verified')

        self.assertTrue(self.verified.wait(2))

        self.pubsub_management.deactivate_subscription(subscription_id)

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_queue_cleanup(self):
        stream_id, route = self.pubsub_management.create_stream(
            'test_stream', 'xp1')
        xn_objs, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='queue1')
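        # Remove any 'queue1' queues left over from previous runs so the counts
        # below are deterministic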
        for xn_obj in xn_objs:
            xn = self.container.ex_manager.create_xn_queue(xn_obj.name)
            xn.delete()
        subscription_id = self.pubsub_management.create_subscription(
            'queue1', stream_ids=[stream_id])
        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids), 1)

        self.pubsub_management.delete_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids), 0)

    def test_activation_and_deactivation(self):
        stream_id, route = self.pubsub_management.create_stream(
            'stream1', 'xp1')
        subscription_id = self.pubsub_management.create_subscription(
            'sub1', stream_ids=[stream_id])

        self.check1 = Event()

        def verifier(m, r, s):
            self.check1.set()

        subscriber = StandaloneStreamSubscriber('sub1', verifier)
        subscriber.start()

        publisher = StandaloneStreamPublisher(stream_id, route)
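        # The subscription has not been activated yet, so this message should not
        # be delivered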
        publisher.publish('should not receive')

        self.assertFalse(self.check1.wait(0.25))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        self.check1.clear()
        self.assertFalse(self.check1.is_set())

        self.pubsub_management.deactivate_subscription(subscription_id)

        publisher.publish('should not receive')
        self.assertFalse(self.check1.wait(0.5))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        subscriber.stop()

        self.pubsub_management.deactivate_subscription(subscription_id)
        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_topic_crud(self):

        topic_id = self.pubsub_management.create_topic(
            name='test_topic', exchange_point='test_xp')
        self.exchange_cleanup.append('test_xp')

        topic = self.pubsub_management.read_topic(topic_id)

        self.assertEquals(topic.name, 'test_topic')
        self.assertEquals(topic.exchange_point, 'test_xp')

        self.pubsub_management.delete_topic(topic_id)
        with self.assertRaises(NotFound):
            self.pubsub_management.read_topic(topic_id)

    def test_full_pubsub(self):

        self.sub1_sat = Event()
        self.sub2_sat = Event()

        def subscriber1(m, r, s):
            self.sub1_sat.set()

        def subscriber2(m, r, s):
            self.sub2_sat.set()

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

        sub2 = StandaloneStreamSubscriber('sub2', subscriber2)
        self.queue_cleanup.append(sub2.xn.queue)
        sub2.start()

        log_topic = self.pubsub_management.create_topic(
            'instrument_logs', exchange_point='instruments')
        science_topic = self.pubsub_management.create_topic(
            'science_data', exchange_point='instruments')
        events_topic = self.pubsub_management.create_topic(
            'notifications', exchange_point='events')

        log_stream, route = self.pubsub_management.create_stream(
            'instrument1-logs',
            topic_ids=[log_topic],
            exchange_point='instruments')
        ctd_stream, route = self.pubsub_management.create_stream(
            'instrument1-ctd',
            topic_ids=[science_topic],
            exchange_point='instruments')
        event_stream, route = self.pubsub_management.create_stream(
            'notifications', topic_ids=[events_topic], exchange_point='events')
        raw_stream, route = self.pubsub_management.create_stream(
            'temp', exchange_point='global.data')
        self.exchange_cleanup.extend(['instruments', 'events', 'global.data'])

        subscription1 = self.pubsub_management.create_subscription(
            'subscription1',
            stream_ids=[log_stream, event_stream],
            exchange_name='sub1')
        subscription2 = self.pubsub_management.create_subscription(
            'subscription2',
            exchange_points=['global.data'],
            stream_ids=[ctd_stream],
            exchange_name='sub2')

        self.pubsub_management.activate_subscription(subscription1)
        self.pubsub_management.activate_subscription(subscription2)

        self.publish_on_stream(log_stream, 1)
        self.assertTrue(self.sub1_sat.wait(4))
        self.assertFalse(self.sub2_sat.is_set())

        self.publish_on_stream(raw_stream, 1)
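        # subscription2 binds to the whole 'global.data' exchange point, so the
        # raw stream message should reach sub2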
        self.assertTrue(self.sub2_sat.wait(4))

        sub1.stop()
        sub2.stop()

    def test_topic_craziness(self):

        self.msg_queue = Queue()

        def subscriber1(m, r, s):
            self.msg_queue.put(m)

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

        topic1 = self.pubsub_management.create_topic('topic1',
                                                     exchange_point='xp1')
        topic2 = self.pubsub_management.create_topic('topic2',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic1)
        topic3 = self.pubsub_management.create_topic('topic3',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic1)
        topic4 = self.pubsub_management.create_topic('topic4',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic2)
        topic5 = self.pubsub_management.create_topic('topic5',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic2)
        topic6 = self.pubsub_management.create_topic('topic6',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic3)
        topic7 = self.pubsub_management.create_topic('topic7',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic3)

        # Tree 2
        topic8 = self.pubsub_management.create_topic('topic8',
                                                     exchange_point='xp2')
        topic9 = self.pubsub_management.create_topic('topic9',
                                                     exchange_point='xp2',
                                                     parent_topic_id=topic8)
        topic10 = self.pubsub_management.create_topic('topic10',
                                                      exchange_point='xp2',
                                                      parent_topic_id=topic9)
        topic11 = self.pubsub_management.create_topic('topic11',
                                                      exchange_point='xp2',
                                                      parent_topic_id=topic9)
        topic12 = self.pubsub_management.create_topic('topic12',
                                                      exchange_point='xp2',
                                                      parent_topic_id=topic11)
        topic13 = self.pubsub_management.create_topic('topic13',
                                                      exchange_point='xp2',
                                                      parent_topic_id=topic11)
        self.exchange_cleanup.extend(['xp1', 'xp2'])

        stream1_id, route = self.pubsub_management.create_stream(
            'stream1',
            topic_ids=[topic7, topic4, topic5],
            exchange_point='xp1')
        stream2_id, route = self.pubsub_management.create_stream(
            'stream2', topic_ids=[topic8], exchange_point='xp2')
        stream3_id, route = self.pubsub_management.create_stream(
            'stream3', topic_ids=[topic10, topic13], exchange_point='xp2')
        stream4_id, route = self.pubsub_management.create_stream(
            'stream4', topic_ids=[topic9], exchange_point='xp2')
        stream5_id, route = self.pubsub_management.create_stream(
            'stream5', topic_ids=[topic11], exchange_point='xp2')

        subscription1 = self.pubsub_management.create_subscription(
            'sub1', topic_ids=[topic1])
        subscription2 = self.pubsub_management.create_subscription(
            'sub2', topic_ids=[topic8], exchange_name='sub1')
        subscription3 = self.pubsub_management.create_subscription(
            'sub3', topic_ids=[topic9], exchange_name='sub1')
        subscription4 = self.pubsub_management.create_subscription(
            'sub4',
            topic_ids=[topic10, topic13, topic11],
            exchange_name='sub1')
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription1)
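        # stream1 was created on topics 4, 5 and 7, all descendants of topic1, so
        # this subscription (rooted at topic1) should receive its messages once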

        self.publish_on_stream(stream1_id, 1)

        self.assertEquals(self.msg_queue.get(timeout=10), 1)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription1)
        self.pubsub_management.delete_subscription(subscription1)
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription2)

        self.publish_on_stream(stream2_id, 2)
        self.assertEquals(self.msg_queue.get(timeout=10), 2)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription2)
        self.pubsub_management.delete_subscription(subscription2)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription3)
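        # subscription3 is rooted at topic9: stream2 (topic8, the parent) should
        # not be delivered, while stream3 (topics 10 and 13, its descendants) should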

        self.publish_on_stream(stream2_id, 3)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream3_id, 4)
        self.assertEquals(self.msg_queue.get(timeout=10), 4)

        self.pubsub_management.deactivate_subscription(subscription3)
        self.pubsub_management.delete_subscription(subscription3)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription4)
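        # subscription4 spans topics 10, 13 and 11: stream4 (topic9, an ancestor)
        # should not arrive, while stream5 (topic11) should arrive exactly once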

        self.publish_on_stream(stream4_id, 5)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream5_id, 6)
        self.assertEquals(self.msg_queue.get(timeout=10), 6)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.pubsub_management.deactivate_subscription(subscription4)
        self.pubsub_management.delete_subscription(subscription4)

        #--------------------------------------------------------------------------------
        sub1.stop()

        self.pubsub_management.delete_topic(topic13)
        self.pubsub_management.delete_topic(topic12)
        self.pubsub_management.delete_topic(topic11)
        self.pubsub_management.delete_topic(topic10)
        self.pubsub_management.delete_topic(topic9)
        self.pubsub_management.delete_topic(topic8)
        self.pubsub_management.delete_topic(topic7)
        self.pubsub_management.delete_topic(topic6)
        self.pubsub_management.delete_topic(topic5)
        self.pubsub_management.delete_topic(topic4)
        self.pubsub_management.delete_topic(topic3)
        self.pubsub_management.delete_topic(topic2)
        self.pubsub_management.delete_topic(topic1)

        self.pubsub_management.delete_stream(stream1_id)
        self.pubsub_management.delete_stream(stream2_id)
        self.pubsub_management.delete_stream(stream3_id)
        self.pubsub_management.delete_stream(stream4_id)
        self.pubsub_management.delete_stream(stream5_id)

    def _get_pdict(self, filter_values):
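        """
        Build a parameter dictionary containing only the contexts named in
        filter_values (e.g. self._get_pdict(['TIME', 'TEMPWAT_L0'])), caching the
        result in self.pdicts so repeated calls reuse the same dictionary id
        """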
        t_ctxt = ParameterContext(
            'TIME', param_type=QuantityType(value_encoding=np.dtype('int64')))
        t_ctxt.uom = 'seconds since 01-01-1900'
        t_ctxt_id = self.dataset_management.create_parameter_context(
            name='TIME',
            parameter_context=t_ctxt.dump(),
            parameter_type='quantity<int64>',
            unit_of_measure=t_ctxt.uom)

        lat_ctxt = ParameterContext(
            'LAT',
            param_type=ConstantType(
                QuantityType(value_encoding=np.dtype('float32'))),
            fill_value=-9999)
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt_id = self.dataset_management.create_parameter_context(
            name='LAT',
            parameter_context=lat_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=lat_ctxt.uom)

        lon_ctxt = ParameterContext(
            'LON',
            param_type=ConstantType(
                QuantityType(value_encoding=np.dtype('float32'))),
            fill_value=-9999)
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt_id = self.dataset_management.create_parameter_context(
            name='LON',
            parameter_context=lon_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=lon_ctxt.uom)

        # Independent Parameters
        # Temperature - values expected to be the decimal results of conversion from hex
        temp_ctxt = ParameterContext(
            'TEMPWAT_L0',
            param_type=QuantityType(value_encoding=np.dtype('float32')),
            fill_value=-9999)
        temp_ctxt.uom = 'deg_C'
        temp_ctxt_id = self.dataset_management.create_parameter_context(
            name='TEMPWAT_L0',
            parameter_context=temp_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=temp_ctxt.uom)

        # Conductivity - values expected to be the decimal results of conversion from hex
        cond_ctxt = ParameterContext(
            'CONDWAT_L0',
            param_type=QuantityType(value_encoding=np.dtype('float32')),
            fill_value=-9999)
        cond_ctxt.uom = 'S m-1'
        cond_ctxt_id = self.dataset_management.create_parameter_context(
            name='CONDWAT_L0',
            parameter_context=cond_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=cond_ctxt.uom)

        # Pressure - values expected to be the decimal results of conversion from hex
        press_ctxt = ParameterContext(
            'PRESWAT_L0',
            param_type=QuantityType(value_encoding=np.dtype('float32')),
            fill_value=-9999)
        press_ctxt.uom = 'dbar'
        press_ctxt_id = self.dataset_management.create_parameter_context(
            name='PRESWAT_L0',
            parameter_context=press_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=press_ctxt.uom)

        # Dependent Parameters

        # TEMPWAT_L1 = (TEMPWAT_L0 / 10000) - 10
        tl1_func = '(T / 10000) - 10'
        tl1_pmap = {'T': 'TEMPWAT_L0'}
        expr = NumexprFunction('TEMPWAT_L1',
                               tl1_func, ['T'],
                               param_map=tl1_pmap)
        tempL1_ctxt = ParameterContext(
            'TEMPWAT_L1',
            param_type=ParameterFunctionType(function=expr),
            variability=VariabilityEnum.TEMPORAL)
        tempL1_ctxt.uom = 'deg_C'
        tempL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=tempL1_ctxt.name,
            parameter_context=tempL1_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=tempL1_ctxt.uom)

        # CONDWAT_L1 = (CONDWAT_L0 / 100000) - 0.5
        cl1_func = '(C / 100000) - 0.5'
        cl1_pmap = {'C': 'CONDWAT_L0'}
        expr = NumexprFunction('CONDWAT_L1',
                               cl1_func, ['C'],
                               param_map=cl1_pmap)
        condL1_ctxt = ParameterContext(
            'CONDWAT_L1',
            param_type=ParameterFunctionType(function=expr),
            variability=VariabilityEnum.TEMPORAL)
        condL1_ctxt.uom = 'S m-1'
        condL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=condL1_ctxt.name,
            parameter_context=condL1_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=condL1_ctxt.uom)

        # Equation uses p_range, a calibration coefficient, fixed here to 679.34040721:
        #   PRESWAT_L1 = (PRESWAT_L0 * p_range / (0.85 * 65536)) - (0.05 * p_range)
        pl1_func = '(P * p_range / (0.85 * 65536)) - (0.05 * p_range)'
        pl1_pmap = {'P': 'PRESWAT_L0', 'p_range': 679.34040721}
        expr = NumexprFunction('PRESWAT_L1',
                               pl1_func, ['P', 'p_range'],
                               param_map=pl1_pmap)
        presL1_ctxt = ParameterContext(
            'PRESWAT_L1',
            param_type=ParameterFunctionType(function=expr),
            variability=VariabilityEnum.TEMPORAL)
        presL1_ctxt.uom = 'dbar'
        presL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=presL1_ctxt.name,
            parameter_context=presL1_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=presL1_ctxt.uom)

        # Density & practical salinity calculated using the Gibbs Seawater library - available via the python-gsw project:
        #       https://code.google.com/p/python-gsw/ & http://pypi.python.org/pypi/gsw/3.0.1

        # PRACSAL = gsw.SP_from_C((CONDWAT_L1 * 10), TEMPWAT_L1, PRESWAT_L1)
        owner = 'gsw'
        sal_func = 'SP_from_C'
        sal_arglist = ['C', 't', 'p']
        sal_pmap = {
            'C':
            NumexprFunction('CONDWAT_L1*10',
                            'C*10', ['C'],
                            param_map={'C': 'CONDWAT_L1'}),
            't':
            'TEMPWAT_L1',
            'p':
            'PRESWAT_L1'
        }
        sal_kwargmap = None
        expr = PythonFunction('PRACSAL', owner, sal_func, sal_arglist,
                              sal_kwargmap, sal_pmap)
        sal_ctxt = ParameterContext('PRACSAL',
                                    param_type=ParameterFunctionType(expr),
                                    variability=VariabilityEnum.TEMPORAL)
        sal_ctxt.uom = 'g kg-1'
        sal_ctxt_id = self.dataset_management.create_parameter_context(
            name=sal_ctxt.name,
            parameter_context=sal_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=sal_ctxt.uom)

        # absolute_salinity = gsw.SA_from_SP(PRACSAL, PRESWAT_L1, longitude, latitude)
        # conservative_temperature = gsw.CT_from_t(absolute_salinity, TEMPWAT_L1, PRESWAT_L1)
        # DENSITY = gsw.rho(absolute_salinity, conservative_temperature, PRESWAT_L1)
        owner = 'gsw'
        abs_sal_expr = PythonFunction('abs_sal', owner, 'SA_from_SP',
                                      ['PRACSAL', 'PRESWAT_L1', 'LON', 'LAT'])
        cons_temp_expr = PythonFunction(
            'cons_temp', owner, 'CT_from_t',
            [abs_sal_expr, 'TEMPWAT_L1', 'PRESWAT_L1'])
        dens_expr = PythonFunction(
            'DENSITY', owner, 'rho',
            [abs_sal_expr, cons_temp_expr, 'PRESWAT_L1'])
        dens_ctxt = ParameterContext(
            'DENSITY',
            param_type=ParameterFunctionType(dens_expr),
            variability=VariabilityEnum.TEMPORAL)
        dens_ctxt.uom = 'kg m-3'
        dens_ctxt_id = self.dataset_management.create_parameter_context(
            name=dens_ctxt.name,
            parameter_context=dens_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=dens_ctxt.uom)

        ids = [
            t_ctxt_id, lat_ctxt_id, lon_ctxt_id, temp_ctxt_id, cond_ctxt_id,
            press_ctxt_id, tempL1_ctxt_id, condL1_ctxt_id, presL1_ctxt_id,
            sal_ctxt_id, dens_ctxt_id
        ]
        contexts = [
            t_ctxt, lat_ctxt, lon_ctxt, temp_ctxt, cond_ctxt, press_ctxt,
            tempL1_ctxt, condL1_ctxt, presL1_ctxt, sal_ctxt, dens_ctxt
        ]
        context_ids = [
            ids[i] for i, ctxt in enumerate(contexts)
            if ctxt.name in filter_values
        ]
        pdict_name = '_'.join(
            [ctxt.name for ctxt in contexts if ctxt.name in filter_values])

        try:
            return self.pdicts[pdict_name]
        except KeyError:
            pdict_id = self.dataset_management.create_parameter_dictionary(
                pdict_name,
                parameter_context_ids=context_ids,
                temporal_context='time')
            self.pdicts[pdict_name] = pdict_id
            return pdict_id

class TestDMEnd2End(IonIntegrationTestCase):
    def setUp(self):  # Love the non pep-8 convention
        self._start_container()

        self.container.start_rel_from_url("res/deploy/r2deploy.yml")

        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.ingestion_management = IngestionManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.pids = []
        self.event = Event()
        self.exchange_space_name = "test_granules"
        self.exchange_point_name = "science_data"
        self.i = 0

        self.purge_queues()
        self.queue_buffer = []
        self.streams = []
        self.addCleanup(self.stop_all_ingestion)

    def purge_queues(self):
        xn = self.container.ex_manager.create_xn_queue("science_granule_ingestion")
        xn.purge()

    def tearDown(self):
        self.purge_queues()
        for pid in self.pids:
            self.container.proc_manager.terminate_process(pid)
        IngestionManagementIntTest.clean_subscriptions()
        for queue in self.queue_buffer:
            if isinstance(queue, ExchangeNameQueue):
                queue.delete()
            elif isinstance(queue, str):
                xn = self.container.ex_manager.create_xn_queue(queue)
                xn.delete()

    # --------------------------------------------------------------------------------
    # Helper/Utility methods
    # --------------------------------------------------------------------------------

    def create_dataset(self, parameter_dict_id=""):
        """
        Creates a time-series dataset
        """
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
        if not parameter_dict_id:
            parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name(
                "ctd_parsed_param_dict", id_only=True
            )

        dataset_id = self.dataset_management.create_dataset(
            "test_dataset_%i" % self.i,
            parameter_dictionary_id=parameter_dict_id,
            spatial_domain=sdom,
            temporal_domain=tdom,
        )
        return dataset_id

    def get_datastore(self, dataset_id):
        """
        Gets an instance of the datastore
            This method is primarily used to defeat a bug where integration tests in multiple containers may sometimes 
            delete a CouchDB datastore and the other containers are unaware of the new state of the datastore.
        """
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore

    def get_ingestion_config(self):
        """
        Grab the ingestion configuration from the resource registry
        """
        # The ingestion configuration should have been created by the bootstrap service
        # which is configured through r2deploy.yml

        ingest_configs, _ = self.resource_registry.find_resources(restype=RT.IngestionConfiguration, id_only=True)
        return ingest_configs[0]

    def launch_producer(self, stream_id=""):
        """
        Launch the producer
        """

        pid = self.container.spawn_process(
            "better_data_producer",
            "ion.processes.data.example_data_producer",
            "BetterDataProducer",
            {"process": {"stream_id": stream_id}},
        )

        self.pids.append(pid)

    def make_simple_dataset(self):
        """
        Makes a stream, a stream definition and a dataset, the essentials for most of these tests
        """
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True)
        stream_def_id = self.pubsub_management.create_stream_definition("ctd data", parameter_dictionary_id=pdict_id)
        stream_id, route = self.pubsub_management.create_stream(
            "ctd stream %i" % self.i, "xp1", stream_definition_id=stream_def_id
        )

        dataset_id = self.create_dataset(pdict_id)

        self.get_datastore(dataset_id)
        self.i += 1
        return stream_id, route, stream_def_id, dataset_id

    def publish_hifi(self, stream_id, stream_route, offset=0):
        """
        Publish deterministic data
        """

        pub = StandaloneStreamPublisher(stream_id, stream_route)

        stream_def = self.pubsub_management.read_stream_definition(stream_id=stream_id)
        stream_def_id = stream_def._id
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt["time"] = np.arange(10) + (offset * 10)
        rdt["temp"] = np.arange(10) + (offset * 10)
        pub.publish(rdt.to_granule())

    def publish_fake_data(self, stream_id, route):
        """
        Make four granules
        """
        for i in xrange(4):
            self.publish_hifi(stream_id, route, i)

    def start_ingestion(self, stream_id, dataset_id):
        """
        Starts ingestion/persistence for a given dataset
        """
        ingest_config_id = self.get_ingestion_config()
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=ingest_config_id, dataset_id=dataset_id
        )

    def stop_ingestion(self, stream_id):
        ingest_config_id = self.get_ingestion_config()
        self.ingestion_management.unpersist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=ingest_config_id
        )

    def stop_all_ingestion(self):
        for sid in self.streams:
            try:
                self.stop_ingestion(sid)
            except Exception:
                pass

    def validate_granule_subscription(self, msg, route, stream_id):
        """
        Validation for granule format
        """
        if msg == {}:
            return
        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.info("%s", rdt.pretty_print())
        self.assertIsInstance(msg, Granule, "Message is improperly formatted. (%s)" % type(msg))
        self.event.set()

    def wait_until_we_have_enough_granules(self, dataset_id="", data_size=40):
        """
        Loops until there is a sufficient amount of data in the dataset
        """
        done = False
        with gevent.Timeout(40):
            while not done:
                extents = self.dataset_management.dataset_extents(dataset_id, "time")[0]
                granule = self.data_retriever.retrieve_last_data_points(dataset_id, 1)
                rdt = RecordDictionaryTool.load_from_granule(granule)
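                # The dataset is ready only once real values exist: the first time
                # sample must differ from the fill value and the extents must reach
                # the requested size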
                if rdt["time"] and rdt["time"][0] != rdt._pdict.get_context("time").fill_value and extents >= data_size:
                    done = True
                else:
                    gevent.sleep(0.2)

    # --------------------------------------------------------------------------------
    # Test Methods
    # --------------------------------------------------------------------------------

    @attr("SMOKE")
    def test_dm_end_2_end(self):
        # --------------------------------------------------------------------------------
        # Set up a stream and have a mock instrument (producer) send data
        # --------------------------------------------------------------------------------
        self.event.clear()

        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext("binary", param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context("binary", bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext("records", param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context("records", rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary(
            "replay_pdict", parameter_context_ids=context_ids, temporal_context="time"
        )

        stream_definition = self.pubsub_management.create_stream_definition(
            "ctd data", parameter_dictionary_id=pdict_id
        )

        stream_id, route = self.pubsub_management.create_stream(
            "producer", exchange_point=self.exchange_point_name, stream_definition_id=stream_definition
        )

        # --------------------------------------------------------------------------------
        # Start persisting the data on the stream
        # - Get the ingestion configuration from the resource registry
        # - Create the dataset
        # - call persist_data_stream to setup the subscription for the ingestion workers
        #   on the stream that you specify which causes the data to be persisted
        # --------------------------------------------------------------------------------

        ingest_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=ingest_config_id, dataset_id=dataset_id
        )

        # --------------------------------------------------------------------------------
        # Now the granules are ingesting and persisted
        # --------------------------------------------------------------------------------

        self.launch_producer(stream_id)
        self.wait_until_we_have_enough_granules(dataset_id, 40)

        # --------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to retrieve
        # --------------------------------------------------------------------------------

        replay_data = self.data_retriever.retrieve(dataset_id)
        self.assertIsInstance(replay_data, Granule)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        self.assertTrue((rdt["time"][:10] == np.arange(10)).all(), "%s" % rdt["time"][:])
        self.assertTrue((rdt["binary"][:10] == np.array(["hi"] * 10, dtype="object")).all())

        # --------------------------------------------------------------------------------
        # Now to try the streamed approach
        # --------------------------------------------------------------------------------
        replay_stream_id, replay_route = self.pubsub_management.create_stream(
            "replay_out", exchange_point=self.exchange_point_name, stream_definition_id=stream_definition
        )
        self.replay_id, process_id = self.data_retriever.define_replay(
            dataset_id=dataset_id, stream_id=replay_stream_id
        )
        log.info("Process ID: %s", process_id)

        replay_client = ReplayClient(process_id)

        # --------------------------------------------------------------------------------
        # Create the listening endpoint for the retriever to talk to
        # --------------------------------------------------------------------------------
        xp = self.container.ex_manager.create_xp(self.exchange_point_name)
        subscriber = StandaloneStreamSubscriber(self.exchange_space_name, self.validate_granule_subscription)
        self.queue_buffer.append(self.exchange_space_name)
        subscriber.start()
        subscriber.xn.bind(replay_route.routing_key, xp)

        self.data_retriever.start_replay_agent(self.replay_id)

        self.assertTrue(replay_client.await_agent_ready(5), "The process never launched")
        replay_client.start_replay()

        self.assertTrue(self.event.wait(10))
        subscriber.stop()

        self.data_retriever.cancel_replay_agent(self.replay_id)

        # --------------------------------------------------------------------------------
        # Test the slicing capabilities
        # --------------------------------------------------------------------------------

        granule = self.data_retriever.retrieve(dataset_id=dataset_id, query={"tdoa": slice(0, 5)})
        rdt = RecordDictionaryTool.load_from_granule(granule)
        b = rdt["time"] == np.arange(5)
        self.assertTrue(b.all() if not isinstance(b, bool) else b)
        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)

    @unittest.skip("Doesnt work")
    @attr("LOCOINT")
    @unittest.skipIf(os.getenv("CEI_LAUNCH_TEST", False), "Skip test while in CEI LAUNCH mode")
    def test_replay_pause(self):
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext("binary", param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context("binary", bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext("records", param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context("records", rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary(
            "replay_pdict", parameter_context_ids=context_ids, temporal_context="time"
        )

        stream_def_id = self.pubsub_management.create_stream_definition(
            "replay_stream", parameter_dictionary_id=pdict_id
        )
        replay_stream, replay_route = self.pubsub_management.create_stream(
            "replay", "xp1", stream_definition_id=stream_def_id
        )
        dataset_id = self.create_dataset(pdict_id)
        scov = DatasetManagementService._get_coverage(dataset_id)
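        # Write 100 samples directly into the coverage (bypassing ingestion) so
        # there is data to replay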

        bb = CoverageCraft(scov)
        bb.rdt["time"] = np.arange(100)
        bb.rdt["temp"] = np.random.random(100) + 30
        bb.sync_with_granule()

        DatasetManagementService._persist_coverage(
            dataset_id, bb.coverage
        )  # This invalidates it for multi-host configurations
        # Set up the subscriber to verify the data
        subscriber = StandaloneStreamSubscriber(self.exchange_space_name, self.validate_granule_subscription)
        xp = self.container.ex_manager.create_xp("xp1")
        self.queue_buffer.append(self.exchange_space_name)
        subscriber.start()
        subscriber.xn.bind(replay_route.routing_key, xp)

        # Set up the replay agent and the client wrapper

        # 1) Define the Replay (dataset and stream to publish on)
        self.replay_id, process_id = self.data_retriever.define_replay(dataset_id=dataset_id, stream_id=replay_stream)
        # 2) Make a client to interact with the process (optionally provide it a process to bind with)
        replay_client = ReplayClient(process_id)
        # 3) Start the agent (launch the process)
        self.data_retriever.start_replay_agent(self.replay_id)
        # 4) Start replaying...
        replay_client.start_replay()

        # Wait till we get some granules
        self.assertTrue(self.event.wait(5))

        # We got granules, pause the replay, clear the queue and allow the process to finish consuming
        replay_client.pause_replay()
        gevent.sleep(1)
        subscriber.xn.purge()
        self.event.clear()

        # Make sure there's no remaining messages being consumed
        self.assertFalse(self.event.wait(1))

        # Resume the replay and wait until we start getting granules again
        replay_client.resume_replay()
        self.assertTrue(self.event.wait(5))

        # Stop the replay, clear the queues
        replay_client.stop_replay()
        gevent.sleep(1)
        subscriber.xn.purge()
        self.event.clear()

        # Make sure that it did indeed stop
        self.assertFalse(self.event.wait(1))

        subscriber.stop()

    def test_retrieve_and_transform(self):
        # Make a simple dataset and start ingestion, pretty standard stuff.
        ctd_stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(ctd_stream_id, dataset_id)

        # Stream definition for the salinity data
        salinity_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            "ctd_parsed_param_dict", id_only=True
        )
        sal_stream_def_id = self.pubsub_management.create_stream_definition(
            "sal data", parameter_dictionary_id=salinity_pdict_id
        )

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt["time"] = np.arange(10)
        rdt["temp"] = np.random.randn(10) * 10 + 30
        rdt["conductivity"] = np.random.randn(10) * 2 + 10
        rdt["pressure"] = np.random.randn(10) * 1 + 12

        publisher = StandaloneStreamPublisher(ctd_stream_id, route)
        publisher.publish(rdt.to_granule())

        rdt["time"] = np.arange(10, 20)

        publisher.publish(rdt.to_granule())

        self.wait_until_we_have_enough_granules(dataset_id, 20)

        granule = self.data_retriever.retrieve(
            dataset_id,
            None,
            None,
            "ion.processes.data.transforms.ctd.ctd_L2_salinity",
            "CTDL2SalinityTransformAlgorithm",
            kwargs=dict(params=sal_stream_def_id),
        )
        rdt = RecordDictionaryTool.load_from_granule(granule)
        for i in rdt["salinity"]:
            self.assertNotEquals(i, 0)
        self.streams.append(ctd_stream_id)
        self.stop_ingestion(ctd_stream_id)

    def test_last_granule(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)

        self.publish_hifi(stream_id, route, 0)
        self.publish_hifi(stream_id, route, 1)

        self.wait_until_we_have_enough_granules(dataset_id, 20)  # I just need two

        success = False

        def verifier():
            replay_granule = self.data_retriever.retrieve_last_data_points(dataset_id, 10)

            rdt = RecordDictionaryTool.load_from_granule(replay_granule)

            comp = rdt["time"] == np.arange(10) + 10
            if not isinstance(comp, bool):
                return comp.all()
            return False

        success = poll(verifier)

        self.assertTrue(success)

        success = False

        def verify_points():
            replay_granule = self.data_retriever.retrieve_last_data_points(dataset_id, 5)

            rdt = RecordDictionaryTool.load_from_granule(replay_granule)

            comp = rdt["time"] == np.arange(15, 20)
            if not isinstance(comp, bool):
                return comp.all()
            return False

        success = poll(verify_points)

        self.assertTrue(success)
        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)

    def test_replay_with_parameters(self):
        # --------------------------------------------------------------------------------
        # Create the configurations and the dataset
        # --------------------------------------------------------------------------------
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext("binary", param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context("binary", bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext("records", param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context("records", rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary(
            "replay_pdict", parameter_context_ids=context_ids, temporal_context="time"
        )

        stream_def_id = self.pubsub_management.create_stream_definition(
            "replay_stream", parameter_dictionary_id=pdict_id
        )

        stream_id, route = self.pubsub_management.create_stream(
            "replay_with_params", exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id
        )
        config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id
        )

        # --------------------------------------------------------------------------------
        # Coerce the datastore into existence (beats race condition)
        # --------------------------------------------------------------------------------
        self.get_datastore(dataset_id)

        self.launch_producer(stream_id)

        self.wait_until_we_have_enough_granules(dataset_id, 40)
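
        # Query bounds are unix-epoch seconds while the stored timestamps count
        # from the NTP epoch (1900); 2208988800 s is the offset between the two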

        query = {
            "start_time": 0 - 2208988800,
            "end_time": 20 - 2208988800,
            "stride_time": 2,
            "parameters": ["time", "temp"],
        }
        retrieved_data = self.data_retriever.retrieve(dataset_id=dataset_id, query=query)

        rdt = RecordDictionaryTool.load_from_granule(retrieved_data)
        comp = np.arange(0, 20, 2) == rdt["time"]
        self.assertTrue(comp.all(), "%s" % rdt.pretty_print())
        self.assertEquals(set(rdt.iterkeys()), set(["time", "temp"]))

        extents = self.dataset_management.dataset_extents(dataset_id=dataset_id, parameters=["time", "temp"])
        self.assertTrue(extents["time"] >= 20)
        self.assertTrue(extents["temp"] >= 20)

        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)

    def test_repersist_data(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        self.publish_hifi(stream_id, route, 0)
        self.publish_hifi(stream_id, route, 1)
        self.wait_until_we_have_enough_granules(dataset_id, 20)
        config_id = self.get_ingestion_config()
        self.ingestion_management.unpersist_data_stream(stream_id=stream_id, ingestion_configuration_id=config_id)
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id
        )
        self.publish_hifi(stream_id, route, 2)
        self.publish_hifi(stream_id, route, 3)
        self.wait_until_we_have_enough_granules(dataset_id, 40)
        success = False
        with gevent.timeout.Timeout(5):
            while not success:

                replay_granule = self.data_retriever.retrieve(dataset_id)

                rdt = RecordDictionaryTool.load_from_granule(replay_granule)

                comp = rdt["time"] == np.arange(0, 40)
                if not isinstance(comp, bool):
                    success = comp.all()
                gevent.sleep(1)

        self.assertTrue(success)
        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)

    @attr("LOCOINT")
    @unittest.skipIf(
        os.getenv("CEI_LAUNCH_TEST", False),
        "Host requires file-system access to coverage files, CEI mode does not support.",
    )
    def test_correct_time(self):

        # There are 2208988800 seconds between Jan 1 1900 and Jan 1 1970, i.e.
        #  the conversion factor between unix and NTP time
        unix_now = np.floor(time.time())
        ntp_now = unix_now + 2208988800

        unix_ago = unix_now - 20
        ntp_ago = unix_ago + 2208988800

        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        coverage = DatasetManagementService._get_coverage(dataset_id)
        coverage.insert_timesteps(20)
        coverage.set_parameter_values("time", np.arange(ntp_ago, ntp_now))

        temporal_bounds = self.dataset_management.dataset_temporal_bounds(dataset_id)

        self.assertTrue(np.abs(temporal_bounds[0] - unix_ago) < 2)
        self.assertTrue(np.abs(temporal_bounds[1] - unix_now) < 2)

    @attr("LOCOINT")
    @unittest.skipIf(
        os.getenv("CEI_LAUNCH_TEST", False),
        "Host requires file-system access to coverage files, CEI mode does not support.",
    )
    def test_empty_coverage_time(self):

        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        coverage = DatasetManagementService._get_coverage(dataset_id)
        temporal_bounds = self.dataset_management.dataset_temporal_bounds(dataset_id)
        self.assertEquals([coverage.get_parameter_context("time").fill_value] * 2, temporal_bounds)

    @attr("LOCOINT")
    @unittest.skipIf(
        os.getenv("CEI_LAUNCH_TEST", False),
        "Host requires file-system access to coverage files, CEI mode does not support.",
    )
    def test_out_of_band_retrieve(self):
        # Set up the environment
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)

        # Fill the dataset
        self.publish_fake_data(stream_id, route)
        self.wait_until_we_have_enough_granules(dataset_id, 40)

        # Retrieve the data
        granule = DataRetrieverService.retrieve_oob(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        self.assertTrue((rdt["time"] == np.arange(40)).all())

    @attr("LOCOINT")
    @unittest.skipIf(
        os.getenv("CEI_LAUNCH_TEST", False),
        "Host requires file-system access to coverage files, CEI mode does not support.",
    )
    def test_retrieve_cache(self):
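        # Shorten the cache refresh interval so entry expiry is observable in-test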
        DataRetrieverService._refresh_interval = 1
        datasets = [self.make_simple_dataset() for i in xrange(10)]
        for stream_id, route, stream_def_id, dataset_id in datasets:
            coverage = DatasetManagementService._get_coverage(dataset_id)
            coverage.insert_timesteps(10)
            coverage.set_parameter_values("time", np.arange(10))
            coverage.set_parameter_values("temp", np.arange(10))

        # Verify cache hit and refresh
        dataset_ids = [i[3] for i in datasets]
        self.assertTrue(dataset_ids[0] not in DataRetrieverService._retrieve_cache)
        DataRetrieverService._get_coverage(dataset_ids[0])  # Hit the cache
        cov, age = DataRetrieverService._retrieve_cache[dataset_ids[0]]
        # Verify that it was hit and it's now in there
        self.assertTrue(dataset_ids[0] in DataRetrieverService._retrieve_cache)

        gevent.sleep(DataRetrieverService._refresh_interval + 0.2)

        DataRetrieverService._get_coverage(dataset_ids[0])  # Hit the cache
        cov, age2 = DataRetrieverService._retrieve_cache[dataset_ids[0]]
        self.assertTrue(age2 != age)
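
        # Touching all ten datasets should evict the first entry, showing the
        # retrieve cache is bounded in size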

        for dataset_id in dataset_ids:
            DataRetrieverService._get_coverage(dataset_id)

        self.assertTrue(dataset_ids[0] not in DataRetrieverService._retrieve_cache)

        stream_id, route, stream_def, dataset_id = datasets[0]
        self.start_ingestion(stream_id, dataset_id)
        DataRetrieverService._get_coverage(dataset_id)

        self.assertTrue(dataset_id in DataRetrieverService._retrieve_cache)

        DataRetrieverService._refresh_interval = 100
        self.publish_hifi(stream_id, route, 1)
        self.wait_until_we_have_enough_granules(dataset_id, data_size=20)

        event = gevent.event.Event()
        with gevent.Timeout(20):
            while not event.wait(0.1):
                if dataset_id not in DataRetrieverService._retrieve_cache:
                    event.set()

        self.assertTrue(event.is_set())

    @unittest.skip("Outdated due to ingestion retry")
    @attr("LOCOINT")
    @unittest.skipIf(
        os.getenv("CEI_LAUNCH_TEST", False),
        "Host requires file-system access to coverage files, CEI mode does not support.",
    )
    def test_ingestion_failover(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)

        event = Event()

        def cb(*args, **kwargs):
            event.set()

        sub = EventSubscriber(event_type="ExceptionEvent", callback=cb, origin="stream_exception")
        sub.start()

        self.publish_fake_data(stream_id, route)
        self.wait_until_we_have_enough_granules(dataset_id, 40)

        file_path = DatasetManagementService._get_coverage_path(dataset_id)
        master_file = os.path.join(file_path, "%s_master.hdf5" % dataset_id)
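
        # Corrupt the coverage's master HDF5 file so the next ingest attempt fails
        # and raises the expected ExceptionEvent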

        with open(master_file, "w") as f:
            f.write("this will crash HDF")

        self.publish_hifi(stream_id, route, 5)

        self.assertTrue(event.wait(10))

        sub.stop()

class TestCTDTransformsIntegration(IonIntegrationTestCase):
    pdict_id = None

    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataset_management = self.datasetclient

    def create_logger(self, name, stream_id=''):
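        """
        Spawn a StreamGranuleLogger process that logs granules arriving on
        stream_id; returns the process id
        """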

        # logger process
        producer_definition = ProcessDefinition(name=name + '_logger')
        producer_definition.executable = {
            'module': 'ion.processes.data.stream_granule_logger',
            'class': 'StreamGranuleLogger'
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(
            process_definition=producer_definition)
        configuration = {
            'process': {
                'stream_id': stream_id,
            }
        }
        pid = self.processdispatchclient.schedule_process(
            process_definition_id=logger_procdef_id,
            configuration=configuration)

        return pid

    def _create_instrument_model(self):

        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        return instModel_id

    def _create_instrument_agent(self, instModel_id):

        raw_config = StreamConfiguration(
            stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict')
        parsed_config = StreamConfiguration(
            stream_name='parsed',
            parameter_dictionary_name='ctd_parsed_param_dict')

        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri=DRV_URI_GOOD,
            stream_configurations=[raw_config, parsed_config])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        return instAgent_id

    def _create_instrument_device(self, instModel_id):

        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")

        instDevice_id = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, instDevice_id)

        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
            instDevice_id)

        return instDevice_id

    def _create_instrument_agent_instance(self, instAgent_id, instDevice_id):

        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config)

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        return instAgentInstance_id

    def _create_param_dicts(self):
        tdom, sdom = time_series_domain()

        self.sdom = sdom.dump()
        self.tdom = tdom.dump()

        self.pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)

    def _create_input_data_products(
        self,
        ctd_stream_def_id,
        instDevice_id,
    ):

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=self.tdom,
                           spatial_domain=self.sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(
            dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_parsed_data_product)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_parsed_data_product)

        #---------------------------------------------------------------------------
        # Retrieve the id of the OUTPUT stream from the out Data Product
        #---------------------------------------------------------------------------

        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product,
                                                   PRED.hasStream, None, True)

        pid = self.create_logger('ctd_parsed', stream_ids[0])
        self.loggerpids.append(pid)

        #---------------------------------------------------------------------------
        # Create CTD Raw as the second data product
        #---------------------------------------------------------------------------
        if not self.pdict_id:
            self._create_param_dicts()
        raw_stream_def_id = self.pubsubclient.create_stream_definition(
            name='SBE37_RAW', parameter_dictionary_id=self.pdict_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain=self.tdom,
                           spatial_domain=self.sdom)

        ctd_raw_data_product = self.dataproductclient.create_data_product(
            dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_raw_data_product)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_raw_data_product)

        #---------------------------------------------------------------------------
        # Retrieve the id of the OUTPUT stream from the out Data Product
        #---------------------------------------------------------------------------

        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product,
                                                   PRED.hasStream, None, True)

        log.debug('Data product streams2 = %s', stream_ids)

        return ctd_parsed_data_product
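
    def _first_stream_id(self, data_product_id):
        # Editorial sketch (not part of the original test): the
        # find_objects(..., PRED.hasStream, None, True) pattern recurs
        # throughout this class; a small helper like this would return the
        # first stream id associated with a data product, e.g.
        #   pid = self.create_logger('ctd_parsed',
        #                            self._first_stream_id(ctd_parsed_data_product))
        stream_ids, _ = self.rrclient.find_objects(data_product_id,
                                                   PRED.hasStream, None, True)
        return stream_ids[0]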

    def _create_data_process_definitions(self):

        #-------------------------------------------------------------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------------------------------------------------------------

        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L0_all',
            description='transform ctd package into three separate L0 streams',
            module='ion.processes.data.transforms.ctd.ctd_L0_all',
            class_name='ctd_L0_all')
        self.ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(
            dpd_obj)

        #-------------------------------------------------------------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_conductivity',
            description='create the L1 conductivity data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
            class_name='CTDL1ConductivityTransform')
        self.ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
            dpd_obj)

        #-------------------------------------------------------------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_pressure',
            description='create the L1 pressure data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
            class_name='CTDL1PressureTransform')
        self.ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(
            dpd_obj)

        #-------------------------------------------------------------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_temperature',
            description='create the L1 temperature data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
            class_name='CTDL1TemperatureTransform')
        self.ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(
            dpd_obj)

        #-------------------------------------------------------------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L2_salinity',
            description='create the L2 salinity data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
            class_name='SalinityTransform')
        self.ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
            dpd_obj)

        #-------------------------------------------------------------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L2_density',
            description='create the L2 density data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_density',
            class_name='DensityTransform')
        self.ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(
            dpd_obj)

        return (self.ctd_L0_all_dprocdef_id,
                self.ctd_L1_conductivity_dprocdef_id,
                self.ctd_L1_pressure_dprocdef_id,
                self.ctd_L1_temperature_dprocdef_id,
                self.ctd_L2_salinity_dprocdef_id,
                self.ctd_L2_density_dprocdef_id)

    def _create_stream_definitions(self):
        if not self.pdict_id:
            self._create_param_dicts()

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(
            name='L0_Conductivity', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_conductivity_id,
            self.ctd_L0_all_dprocdef_id,
            binding='conductivity')

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(
            name='L0_Pressure', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_pressure_id,
            self.ctd_L0_all_dprocdef_id,
            binding='pressure')

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(
            name='L0_Temperature', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_temperature_id,
            self.ctd_L0_all_dprocdef_id,
            binding='temperature')
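
        # Editorial note: the binding names used above ('conductivity',
        # 'pressure', 'temperature') are assumed to match the named outputs
        # of the ctd_L0_all transform; a mismatched binding would leave that
        # output stream definition unattached.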

        return outgoing_stream_l0_conductivity_id, outgoing_stream_l0_pressure_id, outgoing_stream_l0_temperature_id

    def _create_l0_output_data_products(self,
                                        outgoing_stream_l0_conductivity_id,
                                        outgoing_stream_l0_pressure_id,
                                        outgoing_stream_l0_temperature_id):

        out_data_prods = []

        ctd_l0_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Conductivity',
            description='transform output conductivity',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_conductivity_output_dp_obj,
            outgoing_stream_l0_conductivity_id)
        out_data_prods.append(self.ctd_l0_conductivity_output_dp_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l0_conductivity_output_dp_id)

        ctd_l0_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id)
        out_data_prods.append(self.ctd_l0_pressure_output_dp_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l0_pressure_output_dp_id)

        ctd_l0_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_temperature_output_dp_obj,
            outgoing_stream_l0_temperature_id)
        out_data_prods.append(self.ctd_l0_temperature_output_dp_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l0_temperature_output_dp_id)

        return out_data_prods

    def _create_l1_out_data_products(self):

        ctd_l1_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l1_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_conductivity_output_dp_obj,
            self.outgoing_stream_l1_conductivity_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l1_conductivity_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(
            self.ctd_l1_conductivity_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_conductivity', stream_ids[0])
        self.loggerpids.append(pid)

        ctd_l1_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Pressure',
            description='transform output L1 pressure',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l1_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_pressure_output_dp_obj, self.outgoing_stream_l1_pressure_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l1_pressure_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(
            self.ctd_l1_pressure_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_pressure', stream_ids[0])
        self.loggerpids.append(pid)

        ctd_l1_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Temperature',
            description='transform output L1 temperature',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l1_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_temperature_output_dp_obj,
            self.outgoing_stream_l1_temperature_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l1_temperature_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(
            self.ctd_l1_temperature_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_temperature', stream_ids[0])
        self.loggerpids.append(pid)

    def _create_l2_out_data_products(self):

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        if not self.pdict_id:
            self._create_param_dicts()
        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(
            name='L2_salinity', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_salinity_id,
            self.ctd_L2_salinity_dprocdef_id,
            binding='salinity')

        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(
            name='L2_Density', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_density_id,
            self.ctd_L2_density_dprocdef_id,
            binding='density')

        ctd_l2_salinity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L2_Salinity',
            description='transform output L2 salinity',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l2_salinity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l2_salinity_output_dp_obj, outgoing_stream_l2_salinity_id)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l2_salinity_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(
            self.ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l2_salinity', stream_ids[0])
        self.loggerpids.append(pid)

        ctd_l2_density_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L2_Density',
            description='transform output L2 density',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l2_density_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l2_density_output_dp_obj, outgoing_stream_l2_density_id)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l2_density_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(
            self.ctd_l2_density_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l2_density', stream_ids[0])
        self.loggerpids.append(pid)

    @unittest.skip('This test errors on coi-nightly, may be OBE.')
    def test_createTransformsThenActivateInstrument(self):

        self.loggerpids = []

        #-------------------------------------------------------------------------------------
        # Create InstrumentModel
        #-------------------------------------------------------------------------------------

        instModel_id = self._create_instrument_model()

        #-------------------------------------------------------------------------------------
        # Create InstrumentAgent
        #-------------------------------------------------------------------------------------

        instAgent_id = self._create_instrument_agent(instModel_id)

        #-------------------------------------------------------------------------------------
        # Create InstrumentDevice
        #-------------------------------------------------------------------------------------

        instDevice_id = self._create_instrument_device(instModel_id)

        #-------------------------------------------------------------------------------------
        # Create Instrument Agent Instance
        #-------------------------------------------------------------------------------------

        instAgentInstance_id = self._create_instrument_agent_instance(
            instAgent_id, instDevice_id)

        #-------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #-------------------------------------------------------------------------------------

        self._create_param_dicts()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(
            name='SBE37_CDM', parameter_dictionary_id=self.pdict_id)

        #-------------------------------------------------------------------------------------
        # Create two data products
        #-------------------------------------------------------------------------------------

        ctd_parsed_data_product = self._create_input_data_products(
            ctd_stream_def_id, instDevice_id)

        #-------------------------------------------------------------------------------------
        # Create data process definitions
        #-------------------------------------------------------------------------------------
        self._create_data_process_definitions()

        #-------------------------------------------------------------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------------------------------------------------------------

        outgoing_stream_l0_conductivity_id, \
        outgoing_stream_l0_pressure_id, \
        outgoing_stream_l0_temperature_id = self._create_stream_definitions()

        self.out_prod_ids = self._create_l0_output_data_products(
            outgoing_stream_l0_conductivity_id, outgoing_stream_l0_pressure_id,
            outgoing_stream_l0_temperature_id)

        self.outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(
            name='L1_conductivity', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            self.outgoing_stream_l1_conductivity_id,
            self.ctd_L1_conductivity_dprocdef_id,
            binding='conductivity')

        self.outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(
            name='L1_Pressure', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            self.outgoing_stream_l1_pressure_id,
            self.ctd_L1_pressure_dprocdef_id,
            binding='pressure')

        self.outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(
            name='L1_Temperature', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            self.outgoing_stream_l1_temperature_id,
            self.ctd_L1_temperature_dprocdef_id,
            binding='temperature')

        self._create_l1_out_data_products()

        self._create_l2_out_data_products()

        #-------------------------------------------------------------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------------------------------------------------------------
        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition_id=self.ctd_L0_all_dprocdef_id,
            in_data_product_ids=[ctd_parsed_data_product],
            out_data_product_ids=self.out_prod_ids)
        self.dataprocessclient.activate_data_process(
            ctd_l0_all_data_process_id)

        data_process = self.rrclient.read(ctd_l0_all_data_process_id)

        process_ids, _ = self.rrclient.find_objects(
            subject=ctd_l0_all_data_process_id,
            predicate=PRED.hasProcess,
            id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process,
                        process_ids[0])

        extended_process = self.dataprocessclient.get_data_process_extension(
            ctd_l0_all_data_process_id)
        self.assertEquals(extended_process.computed.operational_state.status,
                          ComputedValueAvailability.NOTAVAILABLE)
        self.assertEquals(data_process.message_controllable, True)

        #-------------------------------------------------------------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------------------------------------------------------------
        l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(
            self.ctd_L1_conductivity_dprocdef_id,
            [self.ctd_l0_conductivity_output_dp_id],
            [self.ctd_l1_conductivity_output_dp_id])
        self.dataprocessclient.activate_data_process(
            l1_conductivity_data_process_id)

        data_process = self.rrclient.read(l1_conductivity_data_process_id)
        process_ids, _ = self.rrclient.find_objects(
            subject=l1_conductivity_data_process_id,
            predicate=PRED.hasProcess,
            id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process,
                        process_ids[0])

        #-------------------------------------------------------------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------------------------------------------------------------
        l1_pressure_data_process_id = self.dataprocessclient.create_data_process(
            self.ctd_L1_pressure_dprocdef_id,
            [self.ctd_l0_pressure_output_dp_id],
            [self.ctd_l1_pressure_output_dp_id])
        self.dataprocessclient.activate_data_process(
            l1_pressure_data_process_id)

        data_process = self.rrclient.read(l1_pressure_data_process_id)
        process_ids, _ = self.rrclient.find_objects(
            subject=l1_pressure_data_process_id,
            predicate=PRED.hasProcess,
            id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process,
                        process_ids[0])

        #-------------------------------------------------------------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------------------------------------------------------------
        l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(
            self.ctd_L1_temperature_dprocdef_id,
            [self.ctd_l0_temperature_output_dp_id],
            [self.ctd_l1_temperature_output_dp_id])
        self.dataprocessclient.activate_data_process(
            l1_temperature_all_data_process_id)

        data_process = self.rrclient.read(l1_temperature_all_data_process_id)
        process_ids, _ = self.rrclient.find_objects(
            subject=l1_temperature_all_data_process_id,
            predicate=PRED.hasProcess,
            id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process,
                        process_ids[0])

        #-------------------------------------------------------------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------------------------------------------------------------
        l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(
            self.ctd_L2_salinity_dprocdef_id, [ctd_parsed_data_product],
            [self.ctd_l2_salinity_output_dp_id])
        self.dataprocessclient.activate_data_process(
            l2_salinity_all_data_process_id)

        data_process = self.rrclient.read(l2_salinity_all_data_process_id)
        process_ids, _ = self.rrclient.find_objects(
            subject=l2_salinity_all_data_process_id,
            predicate=PRED.hasProcess,
            id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process,
                        process_ids[0])

        #-------------------------------------------------------------------------------------
        # L2 Density: Create the data process
        #-------------------------------------------------------------------------------------
        l2_density_all_data_process_id = self.dataprocessclient.create_data_process(
            self.ctd_L2_density_dprocdef_id, [ctd_parsed_data_product],
            [self.ctd_l2_density_output_dp_id])
        self.dataprocessclient.activate_data_process(
            l2_density_all_data_process_id)

        data_process = self.rrclient.read(l2_density_all_data_process_id)
        process_ids, _ = self.rrclient.find_objects(
            subject=l2_density_all_data_process_id,
            predicate=PRED.hasProcess,
            id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process,
                        process_ids[0])

        #-------------------------------------------------------------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------------------------------------------------------------
        self.imsclient.start_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)

        # Wait for instrument agent to spawn
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     instDevice_id, ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(15),
            "The instrument agent instance did not spawn in 15 seconds")

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(instDevice_id,
                                              to_name=gate.process_id,
                                              process=FakeProcess())

        #-------------------------------------------------------------------------------------
        # Streaming
        #-------------------------------------------------------------------------------------

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        reply = self._ia_client.execute_agent(cmd)
        self.assertTrue(reply.status == 0)

        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        self.assertTrue(reply.status == 0)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug(
            "(L4-CI-SA-RQ-334): current state after sending go_active command %s",
            str(state))
        self.assertEquals(state, 'DRIVER_STATE_COMMAND')

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        self.assertTrue(reply.status == 0)

        #todo ResourceAgentClient no longer has method set_param
        #        # Make sure the sampling rate and transmission are sane.
        #        params = {
        #            SBE37Parameter.NAVG : 1,
        #            SBE37Parameter.INTERVAL : 5,
        #            SBE37Parameter.TXREALTIME : True
        #        }
        #        self._ia_client.set_param(params)

        #todo There is no ResourceAgentEvent attribute for go_streaming... so what should be the command for it?
        cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
        retval = self._ia_client.execute_resource(cmd)

        # This gevent sleep is there to test the autosample time, which will show something different from default
        # only if the instrument runs for over a minute
        gevent.sleep(90)

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id)

        self.assertIsInstance(extended_instrument.computed.uptime,
                              ComputedStringValue)

        autosample_string = extended_instrument.computed.uptime.value
        autosampling_time = int(autosample_string.split()[4])

        self.assertTrue(autosampling_time > 0)

        cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE)
        retval = self._ia_client.execute_resource(cmd)

        #todo There is no ResourceAgentEvent attribute for go_observatory... so what should be the command for it?
        #        log.debug("test_activateInstrumentStream: calling go_observatory")
        #        cmd = AgentCommand(command='go_observatory')
        #        reply = self._ia_client.execute_agent(cmd)
        #        cmd = AgentCommand(command='get_current_state')
        #        retval = self._ia_client.execute_agent(cmd)
        #        state = retval.result
        #        log.debug("test_activateInstrumentStream: return from go_observatory state  %s", str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client.execute_agent(cmd)
        self.assertTrue(reply.status == 0)

        #-------------------------------------------------------------------------------------------------
        # Cleanup processes
        #-------------------------------------------------------------------------------------------------
        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

        #--------------------------------------------------------------------------------
        # Cleanup data products
        #--------------------------------------------------------------------------------
        dp_ids, _ = self.rrclient.find_resources(restype=RT.DataProduct,
                                                 id_only=True)

        for dp_id in dp_ids:
            self.dataproductclient.delete_data_product(dp_id)
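
        # Editorial note: registering these deletions with self.addCleanup
        # (as is done for the transform processes above) would also run them
        # when the test fails partway through, rather than only on success.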
class TestDeployment(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
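
        # Editorial note: with the final id_only=True argument,
        # find_resources returns a pair whose first element is the list of
        # matching resource ids, hence the [0] in the loop above.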


    #@unittest.skip("targeting")
    def test_create_deployment(self):

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                        name='PlatformSite1',
                                        description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                        name='TestDeployment',
                                        description='some new deployment',
                                        constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj) )

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")




    #@unittest.skip("targeting")
    def base_activate_deployment(self):

        #-------------------------------------------------------------------------------------
        # Create platform site, platform device, platform model
        #-------------------------------------------------------------------------------------

        platform_site__obj = IonObject(RT.PlatformSite,
                                        name='PlatformSite1',
                                        description='test platform site')
        platform_site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(platform_model__obj)



        #-------------------------------------------------------------------------------------
        # Create instrument site
        #-------------------------------------------------------------------------------------

        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM', parameter_dictionary_id=pdict_id)


        #----------------------------------------------------------------------------------------------------
        # Create an instrument device
        #----------------------------------------------------------------------------------------------------

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                        name='InstrumentDevice1',
                                        description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)



        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        #----------------------------------------------------------------------------------------------------

        instrument_model_obj = IonObject(RT.InstrumentModel,
                                        name='InstrumentModel1',
                                        description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)


        #----------------------------------------------------------------------------------------------------
        # Create a deployment object
        #----------------------------------------------------------------------------------------------------

        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )


        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)


        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_platform_model_to_platform_device(res.platform_model_id, res.platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_platform_model_to_platform_site(res.platform_model_id, res.platform_site_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("adding instrument site and device to deployment")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("adding platform site and device to deployment")
        self.omsclient.deploy_platform_site(res.platform_site_id, res.deployment_id)
        self.imsclient.deploy_platform_device(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        log.debug("deactivatin deployment, expecting success")
        self.omsclient.deactivate_deployment(res.deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):

        res = self.base_activate_deployment()

        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):

        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without instrument site, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest, "Devices in this deployment outnumber sites")

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest, "No devices were found in the deployment")


    def test_activate_deployment_asymmetric_children(self):
        """
        P0
        |  \
        P1  P2
        |
        I1

        Complex deployment using CSP

        P0, P1, and P2 share the same platform model.  The CSP solver should be able to work this out
        based on their relationships to parents.

        """

        log.debug("create models")
        imodel_id = self.RR2.create(any_old(RT.InstrumentModel))
        pmodel_id = self.RR2.create(any_old(RT.PlatformModel))

        log.debug("create devices")
        idevice_id = self.RR2.create(any_old(RT.InstrumentDevice))
        pdevice_id = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(3)]

        log.debug("create sites")
        isite_id = self.RR2.create(any_old(RT.InstrumentSite))
        psite_id = [self.RR2.create(any_old(RT.PlatformSite)) for _ in range(3)]

        log.debug("assign models")
        self.RR2.assign_instrument_model_to_instrument_device_with_has_model(imodel_id, idevice_id)
        self.RR2.assign_instrument_model_to_instrument_site_with_has_model(imodel_id, isite_id)
        for x in range(3):
            self.RR2.assign_platform_model_to_platform_device_with_has_model(pmodel_id, pdevice_id[x])
            self.RR2.assign_platform_model_to_platform_site_with_has_model(pmodel_id, psite_id[x])

        log.debug("assign hierarchy")
        self.RR2.assign_instrument_device_to_platform_device_with_has_device(idevice_id, pdevice_id[1])
        self.RR2.assign_instrument_site_to_platform_site_with_has_site(isite_id, psite_id[1])
        for x in range(1,3):
            self.RR2.assign_platform_device_to_platform_device_with_has_device(pdevice_id[x], pdevice_id[0])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(psite_id[x], psite_id[0])

        log.debug("create and activate deployment")
        dep_id = self.RR2.create(any_old(RT.Deployment, {"context": IonObject(OT.RemotePlatformDeploymentContext)}))
        self.RR2.assign_deployment_to_platform_device_with_has_deployment(dep_id, pdevice_id[0])
        self.RR2.assign_deployment_to_platform_site_with_has_deployment(dep_id, psite_id[0])
        self.omsclient.activate_deployment(dep_id)

        log.debug("verifying deployment")
        self.assertEqual(idevice_id, self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(isite_id),
                         "The instrument device was not assigned to the instrument site")
        for x in range(3):
            self.assertEqual(pdevice_id[x], self.RR2.find_platform_device_id_of_platform_site_using_has_device(psite_id[x]),
                             "Platform device %d was not assigned to platform site %d" % (x, x))

    def assert_deploy_fail(self, deployment_id, err_type=BadRequest, fail_message="did not specify fail_message"):
        with self.assertRaises(err_type) as cm:
            self.omsclient.activate_deployment(deployment_id)
        self.assertIn(fail_message, cm.exception.message)
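        # Editorial note: 'exception.message' is a Python 2 idiom; under
        # Python 3 the equivalent assertion would be
        # self.assertIn(fail_message, str(cm.exception)).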

    def test_3x3_matchups_remoteplatform(self):
        self.base_3x3_matchups(IonObject(OT.RemotePlatformDeploymentContext))

    def test_3x3_matchups_cabledinstrument(self):
        self.base_3x3_matchups(IonObject(OT.CabledInstrumentDeploymentContext))

    def test_3x3_matchups_cablednode(self):
        self.base_3x3_matchups(IonObject(OT.CabledNodeDeploymentContext))

    def base_3x3_matchups(self, deployment_context):
        """
        This will be 1 root platform, 3 sub platforms (2 of one model, 1 of another) and 3 sub instruments each (2-to-1)
        """
        deployment_context_type = type(deployment_context).__name__

        instrument_model_id  = [self.RR2.create(any_old(RT.InstrumentModel)) for _ in range(6)]
        platform_model_id    = [self.RR2.create(any_old(RT.PlatformModel)) for _ in range(3)]

        instrument_device_id = [self.RR2.create(any_old(RT.InstrumentDevice)) for _ in range(9)]
        platform_device_id   = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(4)]

        instrument_site_id   = [self.RR2.create(any_old(RT.InstrumentSite,
                                                {"planned_uplink_port":
                                                     IonObject(OT.PlatformPort,
                                                               reference_designator="instport_%d" % (i+1))}))
                                for i in range(9)]

        platform_site_id     = [self.RR2.create(any_old(RT.PlatformSite,
                                                {"planned_uplink_port":
                                                    IonObject(OT.PlatformPort,
                                                              reference_designator="platport_%d" % (i+1))}))
                                for i in range(4)]



        def instrument_model_at(platform_idx, instrument_idx):
            m = platform_idx * 2
            if instrument_idx > 0:
                m += 1
            return m

        def platform_model_at(platform_idx):
            if platform_idx > 0:
                return 1
            return 0

        def instrument_at(platform_idx, instrument_idx):
            return platform_idx * 3 + instrument_idx
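
        # Worked examples of the index helpers above (editorial):
        #   platform_model_at(0) == 0, while platform_model_at(1) and
        #   platform_model_at(2) both return 1, i.e. "2 of one model, 1 of
        #   another" from the docstring.
        #   instrument_model_at(1, 0) == 2; instrument_model_at(1, 1) and
        #   instrument_model_at(1, 2) both return 3, giving the 2-to-1 split
        #   of instrument models per platform.
        #   instrument_at(1, 2) == 5 indexes the flat 9-element instrument
        #   lists.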

        # set up the structure
        for p in range(3):
            m = platform_model_at(p)
            self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[m], platform_site_id[p])
            self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[m], platform_device_id[p])
            self.RR2.assign_platform_device_to_platform_device_with_has_device(platform_device_id[p], platform_device_id[3])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(platform_site_id[p], platform_site_id[3])

            for i in range(3):
                m = instrument_model_at(p, i)
                idx = instrument_at(p, i)
                self.RR2.assign_instrument_model_to_instrument_site_with_has_model(instrument_model_id[m], instrument_site_id[idx])
                self.RR2.assign_instrument_model_to_instrument_device_with_has_model(instrument_model_id[m], instrument_device_id[idx])
                self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id[idx], platform_device_id[p])
                self.RR2.assign_instrument_site_to_platform_site_with_has_site(instrument_site_id[idx], platform_site_id[p])

        # top level models
        self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[2], platform_device_id[3])
        self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[2], platform_site_id[3])



        # verify structure
        for p in range(3):
            parent_id = self.RR2.find_platform_device_id_by_platform_device_using_has_device(platform_device_id[p])
            self.assertEqual(platform_device_id[3], parent_id)

            parent_id = self.RR2.find_platform_site_id_by_platform_site_using_has_site(platform_site_id[p])
            self.assertEqual(platform_site_id[3], parent_id)

        for i in range(len(platform_site_id)):
            self.assertEqual(self.RR2.find_platform_model_of_platform_device_using_has_model(platform_device_id[i]),
                             self.RR2.find_platform_model_of_platform_site_using_has_model(platform_site_id[i]))

        for i in range(len(instrument_site_id)):
            self.assertEqual(self.RR2.find_instrument_model_of_instrument_device_using_has_model(instrument_device_id[i]),
                             self.RR2.find_instrument_model_of_instrument_site_using_has_model(instrument_site_id[i]))


        port_assignments = {}
        for p in range(3):
            port_assignments[platform_device_id[p]] = "platport_%d" % (p+1)
            for i in range(3):
                idx = instrument_at(p, i)
                port_assignments[instrument_device_id[idx]] = "instport_%d" % (idx+1)

        deployment_id = self.RR2.create(any_old(RT.Deployment,
                {"context": deployment_context,
                 "port_assignments": port_assignments}))


        log.debug("assigning device/site to %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[3])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[3])

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_instrument_device_with_has_deployment(deployment_id, instrument_device_id[1])
            self.RR2.assign_deployment_to_instrument_site_with_has_deployment(deployment_id, instrument_site_id[1])

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[1])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[1])

        log.debug("activation of %s deployment", deployment_context_type)
        self.omsclient.activate_deployment(deployment_id)

        log.debug("validation of %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(d, self.RR2.find_platform_device_id_of_platform_site_using_has_device(platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(d, self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[i]))

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.assertEqual(instrument_device_id[1],
                             self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[1]))

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            expected_platforms = [1]
            expected_instruments = [3, 4, 5]

            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(i in expected_platforms,
                                 d in self.RR2.find_platform_device_ids_of_platform_site_using_has_device(platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(i in expected_instruments,
                                 d in self.RR2.find_instrument_device_ids_of_instrument_site_using_has_device(instrument_site_id[i]))
class VisStreamLauncher(ImmediateProcess):
    """
    Class emulates a stream source from a NetCDF file. It emits a record of data every few seconds on a
    stream identified by a routing key.

    """


    def on_init(self):
        log.debug("VizStreamProducer init. Self.id=%s" % self.id)

    def on_start(self):

        log.debug("VizStreamProducer start")

        self.data_source_name = self.CFG.get_safe('name', 'sine_wave_generator')
        self.dataset = self.CFG.get_safe('dataset', 'sinusoidal')

        # create a pubsub client and a resource registry client
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)

        # Dummy instrument related clients
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        # Disabled code: wait for an ingestion configuration to become available
        """
        # Set up the preconditions. Look for an existing ingestion config
        while True:
            log.info("VisStreamLauncher:on_start: Waiting for an ingestion configuration to be available.")
            ingestion_cfgs, _ = self.rrclient.find_resources(RT.IngestionConfiguration, None, None, True)

            if len(ingestion_cfgs) > 0:
                break
            else:
                gevent.sleep(1)
        """


        # Check to see if the data product already exists in the system (e.g., when relaunching after a crash)
        dp_ids,_ = self.rrclient.find_resources(RT.DataProduct, None, self.data_source_name, True)
        if len(dp_ids) > 0:
            data_product_id = dp_ids[0]
        else:
            # Create InstrumentModel
            instModel_obj = IonObject(RT.InstrumentModel, name=self.data_source_name, description=self.data_source_name)
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)

            # Create InstrumentDevice
            instDevice_obj = IonObject(RT.InstrumentDevice, name=self.data_source_name, description=self.data_source_name, serial_number="12345" )

            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

            # create a stream definition for the data from the ctd simulator
            pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
            ctd_stream_def_id = self.pubsubclient.create_stream_definition(name="SBE37_CDM", description="SBE37_CDM", parameter_dictionary_id=pdict_id)


            dp_obj = IonObject(RT.DataProduct,
                name=self.data_source_name,
                description='Example ctd stream')

            data_product_id = self.dpclient.create_data_product(dp_obj, stream_definition_id=ctd_stream_def_id)

            self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id)
            self.dpclient.activate_data_product_persistence(data_product_id=data_product_id)

        print ">>>>>>>>>>>>> Dataproduct for sine wave generator : ", data_product_id

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id, PRED.hasStream, None, True)

        if self.dataset == 'sinusoidal':
            self.container.spawn_process(name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.sinusoidal_stream_publisher', cls='SinusoidalCtdPublisher',
                config={'process': {'stream_id': stream_ids[0]}})
        else:
            self.container.spawn_process(name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.ctd_stream_publisher', cls='SimpleCtdPublisher',
                config={'process': {'stream_id': stream_ids[0]}})

        """
        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition, name='Realtime_HighCharts', id_only=True)

        if not len(workflow_def_ids):
            helper_create_highcharts_workflow_def(self.container)
        """


    def on_quit(self):
        super(VisStreamLauncher, self).on_quit()
        log.debug("VizStreamProducer quit")


class TestTransformWorker(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # Instantiate a process to represent the test
        process = TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(
            node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(
            node=self.container.node, process=process)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client,
                                  self.addCleanup)

        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)

    def push_granule(self, data_product_id):
        '''
        Publishes a granule to the given data product and blocks until the dataset
        monitor confirms that the granule arrived.
        '''
        datasets, _ = self.rrclient.find_objects(data_product_id,
                                                 PRED.hasDataset,
                                                 id_only=True)
        dataset_monitor = DatasetMonitor(datasets[0])

        rdt = self.ph.rdt_for_data_product(data_product_id)
        self.ph.fill_parsed_rdt(rdt)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)

        assert dataset_monitor.wait()
        dataset_monitor.stop()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_transform_worker(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.dp_list = []
        self.data_process_objs = []
        self._output_stream_ids = []
        self.granule_verified = Event()
        self.worker_assigned_event_verified = Event()
        self.dp_created_event_verified = Event()
        self.heartbeat_event_verified = Event()

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition,
                        self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        self.start_event_listener()

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process(
        )
        self.dp_list.append(dataprocess_id)

        # validate that the repository for data product algorithms persists the new resources  NEW SA-1
        # create_data_process call created one of each
        dpd_ids, _ = self.rrclient.find_resources(
            restype=OT.DataProcessDefinition, id_only=False)
        # there will be more than one because of the DPDs that represent the PFs in the data product above
        self.assertTrue(len(dpd_ids) > 0)
        dp_ids, _ = self.rrclient.find_resources(restype=OT.DataProcess,
                                                 id_only=False)
        # only one DP because the PFs in the data product above are not activated yet
        self.assertEquals(len(dp_ids), 1)

        # validate the name and version label  NEW SA - 2
        dataprocessdef_obj = self.dataprocessclient.read_data_process_definition(
            dataprocessdef_id)
        self.assertEqual(dataprocessdef_obj.version_label, '1.0a')
        self.assertEqual(dataprocessdef_obj.name, 'add_arrays')

        # validate that the DPD has an attachment  NEW SA - 21
        attachment_ids, assoc_ids = self.rrclient.find_objects(
            dataprocessdef_id, PRED.hasAttachment, RT.Attachment, True)
        self.assertEqual(len(attachment_ids), 1)
        attachment_obj = self.rrclient.read_attachment(attachment_ids[0])
        log.debug('attachment: %s', attachment_obj)

        # validate that the data process resource has input and output data products associated
        # L4-CI-SA-RQ-364  and NEW SA-3
        outproduct_ids, assoc_ids = self.rrclient.find_objects(
            dataprocess_id, PRED.hasOutputProduct, RT.DataProduct, True)
        self.assertEqual(len(outproduct_ids), 1)
        inproduct_ids, assoc_ids = self.rrclient.find_objects(
            dataprocess_id, PRED.hasInputProduct, RT.DataProduct, True)
        self.assertEqual(len(inproduct_ids), 1)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(
            output_data_product_id[0])

        # Do a basic check that there are 2 entries in the provenance graph: the parent
        # (input) and child (output) data products.
        self.assertEqual(len(output_data_product_provenance), 2)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[
            output_data_product_id[0]]['parents'])
        self.assertTrue(output_data_product_provenance[
            output_data_product_id[0]]['parents'][self.input_dp_id]
                        ['data_process_definition_id'] == dataprocessdef_id)
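        # (The provenance result is a dict keyed by resource id; each product entry has
        # a 'parents' dict whose values record the data process definition that derived
        # the child from that parent, as exercised by the lookups above.)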

        # NEW SA - 4 | Data processing shall include the appropriate data product algorithm name and version number in
        # the metadata of each output data product created by the data product algorithm.
        output_data_product_obj, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=False)
        self.assertIsNotNone(output_data_product_obj[0].name)
        self.assertIsNotNone(output_data_product_obj[0]._rev)

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            predicate=PRED.hasSubscription,
            object_type=RT.Subscription,
            id_only=False)
        log.debug('test_transform_worker subscription_obj:  %s',
                  subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(
            name='parsed_subscription',
            stream_ids=[self.stream_id],
            exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription,
                        self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription,
                        self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id,
                                                   stream_route=stream_route)

        for n in range(1, 101):
            rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
            rdt['time'] = [0]  # time should always come first
            rdt['conductivity'] = [1]
            rdt['pressure'] = [2]
            rdt['salinity'] = [8]

            self.publisher.publish(rdt.to_granule())

        # validate that the output granule is received and the updated value is correct
        self.assertTrue(self.granule_verified.wait(self.wait_time))

        # validate that the data process loaded into worker event is received    (L4-CI-SA-RQ-182)
        self.assertTrue(
            self.worker_assigned_event_verified.wait(self.wait_time))

        # validate that the data process create (with data product ids) event is received    (NEW SA -42)
        self.assertTrue(self.dp_created_event_verified.wait(self.wait_time))

        # validate that the data process heartbeat event is received (for every hundred granules processed) (L4-CI-SA-RQ-182)
        # this takes a while, so use a large wait limit
        self.assertTrue(self.heartbeat_event_verified.wait(200))

        # validate that the code from the transform function can be retrieved via inspect_data_process_definition
        src = self.dataprocessclient.inspect_data_process_definition(
            dataprocessdef_id)
        self.assertIn('def add_arrays(a, b)', src)

        # now delete the DPD and DP, then verify that the resources are retired so that information required for provenance is still available
        self.dataprocessclient.delete_data_process(dataprocess_id)
        self.dataprocessclient.delete_data_process_definition(
            dataprocessdef_id)

        in_dp_objs, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            predicate=PRED.hasInputProduct,
            object_type=RT.DataProduct,
            id_only=True)
        self.assertTrue(len(in_dp_objs) > 0)

        dpd_objs, _ = self.rrclient.find_subjects(
            subject_type=RT.DataProcessDefinition,
            predicate=PRED.hasDataProcess,
            object=dataprocess_id,
            id_only=True)
        self.assertTrue(len(dpd_objs) > 0)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_instrumentdevice(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s', self.input_dp_id)

        # only ever need one device for testing purposes.
        instDevice_obj, _ = self.rrclient.find_resources(
            restype=RT.InstrumentDevice, name='test_ctd_device')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice,
                                       name='test_ctd_device',
                                       description="test_ctd_device",
                                       serial_number="12345")
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process(
        )

        self.addCleanup(self.dataprocessclient.delete_data_process,
                        dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition,
                        dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(
            output_data_product_id[0])

        # Do a basic check that there are 3 entries in the provenance graph: the parent
        # and child data products plus the instrument device that produced the parent.
        self.assertEqual(len(output_data_product_provenance), 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[
            output_data_product_id[0]]['parents'])
        self.assertTrue(instDevice_id in output_data_product_provenance[
            self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[instDevice_id]['type']
                        == 'InstrumentDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_platformdevice(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s', self.input_dp_id)

        # only ever need one device for testing purposes.
        platform_device_obj, _ = self.rrclient.find_resources(
            restype=RT.PlatformDevice, name='TestPlatform')
        if platform_device_obj:
            platform_device_id = platform_device_obj[0]._id
        else:
            platform_device_obj = IonObject(RT.PlatformDevice,
                                            name='TestPlatform',
                                            description="TestPlatform",
                                            serial_number="12345")
            platform_device_id = self.imsclient.create_platform_device(
                platform_device=platform_device_obj)

        self.damsclient.assign_data_product(
            input_resource_id=platform_device_id,
            data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process(
        )
        self.addCleanup(self.dataprocessclient.delete_data_process,
                        dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition,
                        dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(
            output_data_product_id[0])

        # Do a basic check that there are 3 entries in the provenance graph: the parent
        # and child data products plus the platform device that produced the parent.
        self.assertEqual(len(output_data_product_provenance), 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[
            output_data_product_id[0]]['parents'])
        self.assertTrue(platform_device_id in output_data_product_provenance[
            self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[platform_device_id]
                        ['type'] == 'PlatformDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_event_transform_worker(self):
        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # test that a data process (type: data-product-in / event-out) can be defined and launched.
        # verify that event fields are correctly populated

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition,
                        self.stream_def_id)

        # create the DataProduct
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        # create the DPD and two DPs
        self.event_data_process_id = self.create_event_data_processes()

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(
            subject=self.event_data_process_id,
            predicate=PRED.hasSubscription,
            object_type=RT.Subscription,
            id_only=False)
        log.debug('test_event_transform_worker subscription_obj:  %s',
                  subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(
            name='parsed_subscription',
            stream_ids=[self.stream_id],
            exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription,
                        self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription,
                        self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id,
                                                   stream_route=stream_route)

        self.start_event_transform_listener()

        self.data_modified = Event()

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time'] = [0]  # time should always come first
        rdt['conductivity'] = [1]
        rdt['pressure'] = [2]
        rdt['salinity'] = [8]

        self.publisher.publish(rdt.to_granule())

        self.assertTrue(self.event_verified.wait(self.wait_time))

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_bad_argument_map(self):
        self._output_stream_ids = []

        # test that a data process (type: data-product-in / data-product-out) parameter mapping is validated during
        # data process creation and that the correct exception is raised for both input and output.

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition,
                        self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # two data processes using one transform and one DPD

        dp1_func_output_dp_id = self.create_output_data_product()

        # Set up DPD and DP #2 - array add function
        tf_obj = IonObject(
            RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
            uri=
            'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS)
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(
            data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            self.stream_def_id, add_array_dpd_id, binding='add_array_func')

        # create the data process with invalid argument map
        argument_map = {"arr1": "foo", "arr2": "bar"}
        output_param = "salinity"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=add_array_dpd_id,
                inputs=[self.input_dp_id],
                outputs=[dp1_func_output_dp_id],
                argument_map=argument_map,
                out_param_name=output_param)

        ex = cm.exception
        log.debug('exception raised: %s', ex)
        self.assertEqual(
            ex.message,
            "Input data product does not contain the parameters defined in argument map"
        )

        # create the data process with invalid output parameter name
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "foo"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=add_array_dpd_id,
                inputs=[self.input_dp_id],
                outputs=[dp1_func_output_dp_id],
                argument_map=argument_map,
                out_param_name=output_param)

        ex = cm.exception
        log.debug('exception raised: %s', ex)
        self.assertEqual(
            ex.message,
            "Output data product does not contain the output parameter name provided"
        )

    def create_event_data_processes(self):

        # two data processes using one transform and one DPD
        argument_map = {"a": "salinity"}

        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(
            RT.TransformFunction,
            name='validate_salinity_array',
            description='validate_salinity_array',
            function='validate_salinity_array',
            module="ion.processes.data.transforms.test.test_transform_worker",
            arguments=['a'],
            function_type=TransformFunctionType.TRANSFORM)

        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='validate_salinity_array',
            description='validate_salinity_array',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
        )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(
            data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            self.stream_def_id,
            add_array_dpd_id,
            binding='validate_salinity_array')

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition_id=add_array_dpd_id,
            inputs=[self.input_dp_id],
            outputs=None,
            argument_map=argument_map)
        self.damsclient.register_process(dp1_data_process_id)
        self.addCleanup(self.dataprocessclient.delete_data_process,
                        dp1_data_process_id)

        return dp1_data_process_id

    def create_data_process(self):

        # two data processes using one transform and one DPD

        dp1_func_output_dp_id = self.create_output_data_product()
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "salinity"

        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(
            RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
            uri=
            'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
            version_label='1.0a')
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(
            data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            self.stream_def_id, add_array_dpd_id, binding='add_array_func')

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition_id=add_array_dpd_id,
            inputs=[self.input_dp_id],
            outputs=[dp1_func_output_dp_id],
            argument_map=argument_map,
            out_param_name=output_param)
        self.damsclient.register_process(dp1_data_process_id)
        #self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        # add an attachment object to this DPD to test new SA-21
        import msgpack
        attachment_content = 'foo bar'
        attachment_obj = IonObject(RT.Attachment,
                                   name='test_attachment',
                                   attachment_type=AttachmentType.ASCII,
                                   content_type='text/plain',
                                   content=msgpack.packb(attachment_content))
        att_id = self.rrclient.create_attachment(add_array_dpd_id,
                                                 attachment_obj)
        self.addCleanup(self.rrclient.delete_attachment, att_id)

        return add_array_dpd_id, dp1_data_process_id, dp1_func_output_dp_id

    def create_output_data_product(self):
        dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition(
            name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id)

        dp1_output_dp_obj = IonObject(RT.DataProduct,
                                      name='data_process1_data_product',
                                      description='output of add array func')

        dp1_func_output_dp_id = self.dataproductclient.create_data_product(
            dp1_output_dp_obj, dp1_outgoing_stream_id)
        self.addCleanup(self.dataproductclient.delete_data_product,
                        dp1_func_output_dp_id)
        # retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id,
                                                   PRED.hasStream, None, True)
        self._output_stream_ids.append(stream_ids[0])

        subscription_id = self.pubsub_client.create_subscription(
            'validator', data_product_ids=[dp1_func_output_dp_id])
        self.addCleanup(self.pubsub_client.delete_subscription,
                        subscription_id)

        def on_granule(msg, route, stream_id):
            log.debug('recv_packet stream_id: %s route: %s   msg: %s',
                      stream_id, route, msg)
            self.validate_output_granule(msg, route, stream_id)
            self.granule_verified.set()

        validator = StandaloneStreamSubscriber('validator',
                                               callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_client.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription,
                        subscription_id)

        return dp1_func_output_dp_id

    def validate_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DataProcessStatusEvent.
        """
        data_process_event = args[0]
        log.debug("DataProcessStatusEvent: %s",
                  str(data_process_event.__dict__))

        # if data process already created, check origin
        if self.dp_list:
            self.assertIn(data_process_event.origin, self.dp_list)

            # if this is a heartbeat event then 100 granules have been processed
            if 'data process status update.' in data_process_event.description:
                self.heartbeat_event_verified.set()

        else:
            # else check that this is the assign event

            if 'Data process assigned to transform worker' in data_process_event.description:
                self.worker_assigned_event_verified.set()
            elif 'Data process created for data product' in data_process_event.description:
                self.dp_created_event_verified.set()

    def validate_output_granule(self, msg, route, stream_id):
        self.assertIn(stream_id, self._output_stream_ids)

        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.debug('validate_output_granule  rdt: %s', rdt)
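        # add_arrays summed the published conductivity ([1]) and pressure ([2]) values,
        # so the transformed salinity parameter is expected to be [3]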
        sal_val = rdt['salinity']
        np.testing.assert_array_equal(sal_val, np.array([3]))

    def start_event_listener(self):

        es = EventSubscriber(event_type=OT.DataProcessStatusEvent,
                             callback=self.validate_event)
        es.start()

        self.addCleanup(es.stop)

    def validate_transform_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DataProcessStatusEvent.
        """
        status_alert_event = args[0]

        np.testing.assert_array_equal(status_alert_event.origin,
                                      self.stream_id)
        np.testing.assert_array_equal(status_alert_event.values,
                                      np.array([self.event_data_process_id]))
        log.debug("DeviceStatusAlertEvent: %s",
                  str(status_alert_event.__dict__))
        self.event_verified.set()

    def start_event_transform_listener(self):
        es = EventSubscriber(event_type=OT.DeviceStatusAlertEvent,
                             callback=self.validate_transform_event)
        es.start()

        self.addCleanup(es.stop)

    def test_download(self):
        egg_url = 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        egg_path = TransformWorker.download_egg(egg_url)

        import pkg_resources
        pkg_resources.working_set.add_entry(egg_path)

        from ion_example.add_arrays import add_arrays

        a = add_arrays(1, 2)
        self.assertEquals(a, 3)
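
# Minimal sketch of the transform function exercised above (an assumption; the
# real implementation ships inside the ion_example egg downloaded in test_download).
# It is consistent with the assertions in this class: add_arrays(1, 2) == 3, and
# elementwise salinity = conductivity + pressure for the published granules.
def add_arrays(a, b):
    # element-wise addition; works for scalars as well as numpy arrays
    return a + b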


class TestResourceRegistry(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient()

    @unittest.skip('Represents a bug in storage/retrieval')
    def test_tuple_in_dict(self):
        # create a resource with a tuple saved in a dict
        transform_obj = IonObject(RT.Transform)
        transform_obj.configuration = {}
        transform_obj.configuration["tuple"] = ('STRING',)
        transform_id, _ = self.resource_registry_service.create(transform_obj)

        # read the resource back
        returned_transform_obj = self.resource_registry_service.read(transform_id)

        self.assertEqual(transform_obj.configuration["tuple"], returned_transform_obj.configuration["tuple"])

    def test_basics(self):
        # Sequence all the tests so that we can save numerous system starts and stops
        self._do_test_crud()
        self._do_test_read_mult()
        self._do_test_lifecycle()
        self._do_test_attach()
        self._do_test_association()
        self._do_test_find_resources()
        self._do_test_find_objects_mult()

    def _do_test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")
        
        # Can't set attributes that aren't defined in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)
        
        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid,_ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid,_ = self.resource_registry_service.create(inst, headers={'ion-actor-id':str(uid)})

        ids,_ = self.resource_registry_service.find_objects(iid, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def _do_test_read_mult(self):
        test_resource1_id,_  = self.resource_registry_service.create(Resource(name='test1'))
        test_resource2_id,_  = self.resource_registry_service.create(Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o,Resource)
            self.assertTrue(o._id in res_list)

    def _do_test_lifecycle(self):
        # Lifecycle tests
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid,rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT)
        self.assertEquals(att1.availability, AS.PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN)
        self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE))

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)
        self.assertEquals(att2.availability, AS.PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE)
        self.assertTrue("type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce" in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP)
        self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE))

        with self.assertRaises(BadRequest):
            self.resource_registry_service.execute_lifecycle_transition(
                    resource_id=rid, transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(rid, lcstate(LCS.INTEGRATED, AS.PRIVATE))
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED)
        self.assertEquals(att1.availability, AS.PRIVATE)
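        # (Summary of the transitions exercised above: DRAFT -> PLANNED -> DEVELOPED via
        # lifecycle events, then a direct jump to INTEGRATED via set_lifecycle_state,
        # with availability remaining PRIVATE throughout.)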

    def _do_test_attach(self):
        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Create a resource to attach things to
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid,_ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1, include_content=True)
        self.assertEquals(binary, att1.content)

        import base64
        att = Attachment(content=base64.encodestring(binary), attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2, include_content=True)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid, id_only=False, include_content=True, limit=1)
        self.assertEquals(atts[0].content, binary)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [])

    def _do_test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message.startswith("Subject id"))

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message.startswith("Object id"))

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(cm.exception.message == "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None, True)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        # Find subjects (good cases)
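        # find_subjects returns a (matches, associations) tuple; with id_only=True the
        # matches are resource ids, otherwise full objects (hence the indexing below)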
        subj_ret1 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        obj_ret1 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        obj_ret2 = self.resource_registry_service.find_objects(read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(obj_ret1) == len(obj_ret2))
        self.assertTrue(obj_ret1[0] == obj_ret2[0])
        self.assertTrue(obj_ret1[1][0]._id == obj_ret2[1][0]._id)

        obj_ret3 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, True)
        obj_ret4 = self.resource_registry_service.find_objects(actor_identity_obj_id, None, None, True)
        self.assertTrue(len(obj_ret3) == len(obj_ret4))
        self.assertTrue(obj_ret3[0] == obj_ret4[0])
        self.assertTrue(obj_ret3[1][0]._id == obj_ret4[1][0]._id)

        obj_ret5 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, False)
        obj_ret6 = self.resource_registry_service.find_objects(read_actor_identity_obj, None, None, False)
        self.assertTrue(len(obj_ret5) == len(obj_ret6))
        self.assertTrue(obj_ret5[0][0]._id == obj_ret6[0][0]._id)
        self.assertTrue(obj_ret5[1][0]._id == obj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)


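        # Get association (good case)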
        assoc = self.resource_registry_service.get_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def _do_test_find_resources(self):
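        # find_resources supports lookup by name or by lcstate, but not both at once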
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")
        
        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertEquals(len(ret[0]), 0)

        # Instantiate an object
        obj = IonObject("InstrumentAgentInstance", name="name")
        
        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, None, "name", False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

    def _do_test_find_objects_mult(self):
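        # Build a two-hop chain (DataProcess -> Transform -> ProcessDefinition)
        # and verify find_objects_mult returns the associated objects for one or
        # more subjects in a single call.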
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results, _  = self.resource_registry_service.find_objects_mult(subjects=[dp_id],id_only=True)
        self.assertTrue(results == [transform_id])

        results, _  = self.resource_registry_service.find_objects_mult(subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @attr('EXT')
    def test_get_resource_extension(self):

        # Test a resource with multiple owners
        subject1 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        actor_identity_obj1 = IonObject(RT.ActorIdentity, {"name": subject1})
        actor_id1,_ = self.resource_registry_service.create(actor_identity_obj1)

        user_info_obj1 = IonObject(RT.UserInfo, {"name": "Foo"})
        user_info_id1,_ = self.resource_registry_service.create(user_info_obj1)
        self.resource_registry_service.create_association(actor_id1, PRED.hasInfo, user_info_id1)

        subject2 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256"

        actor_identity_obj2 = IonObject(RT.ActorIdentity, {"name": subject2})
        actor_id2,_ = self.resource_registry_service.create(actor_identity_obj2)

        user_info_obj2 = IonObject(RT.UserInfo, {"name": "Foo2"})
        user_info_id2,_ = self.resource_registry_service.create(user_info_obj2)
        self.resource_registry_service.create_association(actor_id2, PRED.hasInfo, user_info_id2)

        test_obj = IonObject(RT.InformationResource,  {"name": "TestResource"})
        test_obj_id,_ = self.resource_registry_service.create(test_obj)
        self.resource_registry_service.create_association(test_obj_id, PRED.hasOwner, actor_id1)
        self.resource_registry_service.create_association(test_obj_id, PRED.hasOwner, actor_id2)

        extended_resource = self.resource_registry_service.get_resource_extension(test_obj_id, OT.ExtendedInformationResource )

        self.assertEqual(test_obj_id,extended_resource._id)
        self.assertEqual(len(extended_resource.owners),2)

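        # passing a stringified list of resource ids yields one extended resource per id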
        extended_resource_list = self.resource_registry_service.get_resource_extension(str([user_info_id1,user_info_id2]), OT.ExtendedInformationResource)
        self.assertEqual(len(extended_resource_list), 2)

        optional_args = {'user_id': user_info_id1}
        extended_resource = self.resource_registry_service.get_resource_extension(test_obj_id, OT.TestExtendedInformationResource, optional_args=optional_args )

        self.assertEqual(test_obj_id,extended_resource._id)
        self.assertEqual(len(extended_resource.owners),2)
        self.assertEqual(extended_resource.user_id, user_info_id1)


class TestExchangeManagementServiceInt(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.ems = ExchangeManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()

        orglist, _ = self.rr.find_resources(RT.Org)
        if len(orglist) != 1:
            raise StandardError("Unexpected number of orgs found")

        self.org_id = orglist[0]._id

        # we test actual exchange interaction in pyon, so it's fine to mock the broker interaction here
        self.container.ex_manager = Mock(exchange.ExchangeManager)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Test reaches into container, doesn\'t work with CEI')
    def test_xs_create_delete(self):
        exchange_space = ExchangeSpace(name="bobo")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        # should be able to pull from RR an exchange space
        es2 = self.rr.read(esid)
        self.assertEquals(exchange_space.name, es2.name)

        # should have an exchange declared on the broker
        self.container.ex_manager.create_xs.assert_called_once_with('bobo', use_ems=False)

        # should have an assoc to an org
        orglist, _ = self.rr.find_subjects(RT.Org, PRED.hasExchangeSpace, esid, id_only=True)
        self.assertEquals(len(orglist), 1)
        self.assertEquals(orglist[0], self.org_id)

        self.ems.delete_exchange_space(esid)

        # should no longer have that id in the RR
        self.assertRaises(NotFound, self.rr.read, esid)

        # should no longer have an assoc to an org
        orglist2, _ = self.rr.find_subjects(RT.Org, PRED.hasExchangeSpace, esid, id_only=True)
        self.assertEquals(len(orglist2), 0)

        # should no longer have that exchange declared
        self.assertEquals(self.container.ex_manager.delete_xs.call_count, 1)
        self.assertIn('bobo', self.container.ex_manager.delete_xs.call_args[0][0].exchange) # prefixed with sysname

    def test_xs_delete_without_create(self):
        self.assertRaises(NotFound, self.ems.delete_exchange_space, '123')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Test reaches into container, doesn\'t work with CEI')
    def test_xp_create_delete(self):

        # xp needs an xs first
        exchange_space = ExchangeSpace(name="doink")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        exchange_point = ExchangePoint(name="hammer")
        epid = self.ems.create_exchange_point(exchange_point, esid)

        # should be in RR
        ep2 = self.rr.read(epid)
        self.assertEquals(exchange_point.name, ep2.name)

        # should be associated to the XS as well
        xslist, _ = self.rr.find_subjects(RT.ExchangeSpace, PRED.hasExchangePoint, epid, id_only=True)
        self.assertEquals(len(xslist), 1)
        self.assertEquals(xslist[0], esid)

        # should exist on broker (both xp and xs)
        self.container.ex_manager.create_xs.assert_called_once_with('doink', use_ems=False)
        self.assertIn('hammer', self.container.ex_manager.create_xp.call_args[0])

        self.ems.delete_exchange_point(epid)
        self.ems.delete_exchange_space(esid)

        # should no longer be in RR
        self.assertRaises(NotFound, self.rr.read, epid)

        # should no longer be associated
        xslist2, _ = self.rr.find_subjects(RT.ExchangeSpace, PRED.hasExchangePoint, epid, id_only=True)
        self.assertEquals(len(xslist2), 0)

        # should no longer exist on broker (both xp and xs)

    def test_xp_create_then_delete_xs(self):
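        # Deleting the XS removes its association to the XP, but the XP itself
        # must then be cleaned up manually (see end of test).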

        # xp needs an xs first
        exchange_space = ExchangeSpace(name="doink")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        exchange_point = ExchangePoint(name="hammer")
        epid = self.ems.create_exchange_point(exchange_point, esid)

        # delete xs
        self.ems.delete_exchange_space(esid)

        # should no longer have an association
        xslist2, _ = self.rr.find_subjects(RT.ExchangeSpace, PRED.hasExchangePoint, epid, id_only=True)
        self.assertEquals(len(xslist2), 0)

        # TEST ONLY: have to clean up the xp or we leave junk on the broker
        # we have to do it manually because the xs is gone
        #self.ems.delete_exchange_point(epid)
        xs = exchange.ExchangeSpace(self.container.ex_manager, exchange_space.name)
        xp = exchange.ExchangePoint(self.container.ex_manager, exchange_point.name, xs, 'ttree')
        self.container.ex_manager.delete_xp(xp, use_ems=False)

    def test_xs_create_update(self):
        raise unittest.SkipTest("Test not implemented yet")

    def test_xn_declare_and_undeclare(self):
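        # Declare an exchange name within an exchange space, verify the RR entry
        # and XS association, then undeclare it.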

        # xn needs an xs first
        exchange_space = ExchangeSpace(name="bozo")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        exchange_name = ExchangeName(name='shoes', xn_type="XN_PROCESS")
        enid = self.ems.declare_exchange_name(exchange_name, esid)

        # should be in RR
        en2 = self.rr.read(enid)
        self.assertEquals(exchange_name.name, en2.name)

        # should have an assoc from XN to XS
        xnlist, _ = self.rr.find_subjects(RT.ExchangeSpace, PRED.hasExchangeName, enid, id_only=True)
        self.assertEquals(len(xnlist), 1)
        self.assertEquals(xnlist[0], esid)

        # container API got called, will have declared a queue
        self.ems.undeclare_exchange_name(enid)      # canonical name = xn id in current impl

    def test_xn_declare_no_xs(self):
        exchange_name = ExchangeName(name="shoez", xn_type='XN_PROCESS')
        self.assertRaises(NotFound, self.ems.declare_exchange_name, exchange_name, '11')

    def test_xn_undeclare_without_declare(self):
        self.assertRaises(NotFound, self.ems.undeclare_exchange_name, 'some_non_id')

    def test_xn_declare_then_delete_xs(self):

        # xn needs an xs first
        exchange_space = ExchangeSpace(name="bozo")
        esid = self.ems.create_exchange_space(exchange_space, self.org_id)

        exchange_name = ExchangeName(name='shnoz', xn_type="XN_PROCESS")
        enid = self.ems.declare_exchange_name(exchange_name, esid)

        # delete the XS
        self.ems.delete_exchange_space(esid)

        # no longer should have assoc from XS to XN
        xnlist, _ = self.rr.find_subjects(RT.ExchangeSpace, PRED.hasExchangeName, enid, id_only=True)
        self.assertEquals(len(xnlist), 0)
Example #46
class VisStreamLauncher(ImmediateProcess):
    """
    Class emulates a stream source from a NetCDF file. It emits a record of data every few seconds on a
    stream identified by a routing key.

    """
    def on_init(self):
        log.debug("VizStreamProducer init. Self.id=%s" % self.id)

    def on_start(self):
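        # Flow: wire up service clients, reuse an existing DataProduct for this
        # data source if one is found, otherwise create a dummy instrument and
        # data product, then spawn the matching publisher process.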

        log.debug("VizStreamProducer start")
        self.data_source_name = self.CFG.get('name')
        self.dataset = self.CFG.get('dataset')

        # create a pubsub client and a resource registry client
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)

        # Dummy instrument related clients
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.IngestClient = IngestionManagementServiceClient(
            node=self.container.node)

        # Disabled: optionally wait for an ingestion configuration before proceeding
        """
        # Set up the preconditions. Look for an existing ingestion config
        while True:
            log.info("VisStreamLauncher:on_start: Waiting for an ingestion configuration to be available.")
            ingestion_cfgs, _ = self.rrclient.find_resources(RT.IngestionConfiguration, None, None, True)

            if len(ingestion_cfgs) > 0:
                break
            else:
                gevent.sleep(1)
        """

        # Check to see if the data product already exists in the system (e.g. when relaunching after a crash)
        dp_ids, _ = self.rrclient.find_resources(RT.DataProduct, None,
                                                 self.data_source_name, True)
        if len(dp_ids) > 0:
            data_product_id = dp_ids[0]
            print '>>>>>>>>>>>>> Found dp_id = ', data_product_id
        else:
            # Create InstrumentModel
            instModel_obj = IonObject(RT.InstrumentModel,
                                      name=self.data_source_name,
                                      description=self.data_source_name,
                                      model_label=self.data_source_name)
            instModel_id = self.imsclient.create_instrument_model(
                instModel_obj)

            # Create InstrumentDevice
            instDevice_obj = IonObject(RT.InstrumentDevice,
                                       name=self.data_source_name,
                                       description=self.data_source_name,
                                       serial_number="12345")

            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instModel_id, instDevice_id)

            # create a stream definition for the data from the ctd simulator
            ctd_stream_def = SBE37_CDM_stream_definition()
            ctd_stream_def_id = self.pubsubclient.create_stream_definition(
                container=ctd_stream_def)

            print 'Creating new CDM data product with a stream definition'
            dp_obj = IonObject(RT.DataProduct,
                               name=self.data_source_name,
                               description='ctd stream test')
            data_product_id = self.dpclient.create_data_product(
                dp_obj, ctd_stream_def_id)

            self.damsclient.assign_data_product(
                input_resource_id=instDevice_id,
                data_product_id=data_product_id)
            self.dpclient.activate_data_product_persistence(
                data_product_id=data_product_id,
                persist_data=True,
                persist_metadata=True)

            print '>>>>>>>>>>>> New dp_id = ', data_product_id

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id,
                                                   PRED.hasStream, None, True)

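        # choose the publisher implementation based on the configured dataset type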
        if self.dataset == 'sinusoidal':
            pid = self.container.spawn_process(
                name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.sinusoidal_stream_publisher',
                cls='SinusoidalCtdPublisher',
                config={'process': {
                    'stream_id': stream_ids[0]
                }})
        else:
            pid = self.container.spawn_process(
                name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.ctd_stream_publisher',
                cls='SimpleCtdPublisher',
                config={'process': {
                    'stream_id': stream_ids[0]
                }})

    def on_quit(self):
        log.debug("VizStreamProducer quit")
class TestDataProductProvenance(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.process_dispatcher   = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()

        # create missing data process definition
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                            description="normally in preload",
                            module='ion.processes.data.transforms.logical_transform',
                            class_name='logical_transform')
        self.dataprocessclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)


    #@unittest.skip('not ready')
    def test_get_provenance(self):

        #create a deployment with metadata and an initial site and device
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, "")
        log.debug('test_get_provenance: new instrument_site_id = %s', str(instrument_site_id))


        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel" )

        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" %ex)
        log.debug( 'test_get_provenance: new InstrumentModel id = %s ', str(instModel_id))

        self.omsclient.assign_instrument_model_to_instrument_site(instModel_id, instrument_site_id)


        # Create InstrumentAgent
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict', records_per_granule=2, granule_publish_rate=5 )
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                name='agent007',
                                description="SBE37IMAgent",
                                driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg",
                                stream_configurations = [parsed_config] )
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" %ex)
        log.debug( 'test_get_provenance:new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_get_provenance: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" %ex)

        log.debug("test_get_provenance: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ", instDevice_id)


        #-------------------------------
        # Create CTD Parsed  data product
        #-------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
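        # dump() serializes the domain objects so they can be embedded in the resource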


        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubclient.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)

        log.debug('test_get_provenance: Creating new CDM data product with a stream definition')


        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dpmsclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', ctd_parsed_data_product)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)
        self.dpmsclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product)

        #-------------------------------
        # create a data product for the site to pass the OMS check.... we need to remove this check
        #-------------------------------
        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        log_data_product_id = self.dpmsclient.create_data_product(dp_obj, parsed_stream_def_id)
        self.omsclient.create_site_data_product(instrument_site_id, log_data_product_id)


        #-------------------------------
        # Deploy instrument device to instrument site
        #-------------------------------
        deployment_obj = IonObject(RT.Deployment,
                                        name='TestDeployment',
                                        description='some new deployment')
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id)
        self.imsclient.deploy_instrument_device(instDevice_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        self.omsclient.activate_deployment(deployment_id)
        inst_device_objs, _ = self.rrclient.find_objects(subject=instrument_site_id, predicate=PRED.hasDevice, object_type=RT.InstrumentDevice, id_only=False)
        log.debug("test_get_provenance: deployed device: %s ", str(inst_device_objs[0]) )

        #-------------------------------
        # Create the agent instance
        #-------------------------------

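        # port agent connection parameters for the SBE37, taken from CFG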
        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }


        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
            description="SBE37IMAgentInstance",
            port_agent_config = port_agent_config)

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition ctd_L0_all")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new ctd_L0_all data process definition: %s" %ex)


        #-------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1ConductivityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_conductivity',
                            description='create the L1 conductivity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
                            class_name='CTDL1ConductivityTransform')
        try:
            ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1ConductivityTransform data process definition: %s" %ex)

        #-------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1PressureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_pressure',
                            description='create the L1 pressure data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
                            class_name='CTDL1PressureTransform')
        try:
            ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1PressureTransform data process definition: %s" %ex)


        #-------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1TemperatureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_temperature',
                            description='create the L1 temperature data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
                            class_name='CTDL1TemperatureTransform')
        try:
            ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1TemperatureTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition SalinityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_salinity',
                            description='create the L2 salinity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
                            class_name='SalinityTransform')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new SalinityTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition DensityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_density',
                            description='create the L2 density data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_density',
                            class_name='DensityTransform')
        try:
            ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new DensityTransform data process definition: %s" %ex)



        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


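        # map binding names to output data product ids; handed to create_data_process below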
        self.output_products={}
        log.debug("TestDataProductProvenance: create output data product L0 conductivity")

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)

        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id


        log.debug("TestDataProductProvenance: create output data product L0 pressure")

        ctd_l0_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L0_Pressure',
                                                    description='transform output pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                            outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        log.debug("TestDataProductProvenance: create output data product L0 temperature")

        ctd_l0_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L0_Temperature',
                                                        description='transform output temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # L1 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(name='L1_conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id, binding='conductivity' )

        outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(name='L1_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id, binding='pressure' )

        outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(name='L1_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id, binding='temperature' )

        log.debug("TestDataProductProvenance: create output data product L1 conductivity")

        ctd_l1_conductivity_output_dp_obj = IonObject(RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l1_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_conductivity_output_dp_obj,
                                                                                outgoing_stream_l1_conductivity_id)


        log.debug("TestDataProductProvenance: create output data product L1 pressure")

        ctd_l1_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L1_Pressure',
                                                    description='transform output L1 pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l1_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_pressure_output_dp_obj,
                                                                            outgoing_stream_l1_pressure_id)


        log.debug("TestDataProductProvenance: create output data product L1 temperature")

        ctd_l1_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L1_Temperature',
                                                        description='transform output L1 temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l1_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_temperature_output_dp_obj,
                                                                                outgoing_stream_l1_temperature_id)

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(name='L2_salinity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id, binding='salinity' )

        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(name='L2_Density', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id, binding='density' )

        log.debug("TestDataProductProvenance: create output data product L2 Salinity")

        ctd_l2_salinity_output_dp_obj = IonObject( RT.DataProduct,
                                                    name='L2_Salinity',
                                                    description='transform output L2 salinity',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)


        ctd_l2_salinity_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_salinity_output_dp_obj,
                                                                            outgoing_stream_l2_salinity_id)


        log.debug("TestDataProductProvenance: create output data product L2 Density")

#        ctd_l2_density_output_dp_obj = IonObject(   RT.DataProduct,
#                                                    name='L2_Density',
#                                                    description='transform output pressure',
#                                                    temporal_domain = tdom,
#                                                    spatial_domain = sdom)
#
#        ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj,
#                                                                            outgoing_stream_l2_density_id,
#                                                                            parameter_dictionary)

        contactInfo = ContactInformation()
        contactInfo.individual_names_given = "Bill"
        contactInfo.individual_name_family = "Smith"
        contactInfo.street_address = "111 First St"
        contactInfo.city = "San Diego"
        contactInfo.email = "*****@*****.**"
        contactInfo.phones = ["858-555-6666"]
        contactInfo.country = "USA"
        contactInfo.postal_code = "92123"

        ctd_l2_density_output_dp_obj = IonObject(   RT.DataProduct,
                                                    name='L2_Density',
                                                    description='transform output L2 density',
                                                    contacts = [contactInfo],
                                                    iso_topic_category = "my_iso_topic_category_here",
                                                    quality_control_level = "1",
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj,
                                                                            outgoing_stream_l2_density_id)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L0 all data_process start")
        try:
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
            #activate only this data process just for coverage
            self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        contents = "this is the lookup table  contents, replace with a file..."
        att = IonObject(RT.Attachment, name='deviceLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII)
        deviceAttachment = self.rrclient.create_attachment(ctd_l0_all_data_process_id, att)
        log.info('test_get_provenance: data process attachment id = %s', deviceAttachment)

        log.debug("TestDataProductProvenance: create L0 all data_process return")


        #-------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1 Conductivity data_process start")
        try:
            l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_conductivity_dprocdef_id, [ctd_l0_conductivity_output_dp_id], {'conductivity':ctd_l1_conductivity_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)




        #-------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_pressure_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_pressure_dprocdef_id, [ctd_l0_pressure_output_dp_id], {'pressure':ctd_l1_pressure_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_pressure_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_temperature_dprocdef_id, [ctd_l0_temperature_output_dp_id], {'temperature':ctd_l1_temperature_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        #-------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L2_salinity data_process start")
        try:
            l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_salinity_dprocdef_id, [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id], {'salinity':ctd_l2_salinity_output_dp_id})
            self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # L2 Density: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L2_Density data_process start")
        try:
            l2_density_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_density_dprocdef_id, [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id], {'density':ctd_l2_density_output_dp_id})
            self.dataprocessclient.activate_data_process(l2_density_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)



        #-------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        print 'TestDataProductProvenance: Instrument agent instance obj = ', inst_agent_instance_obj

        # Start a resource agent client to talk with the instrument agent.
#        self._ia_client = ResourceAgentClient('iaclient', name=inst_agent_instance_obj.agent_process_id,  process=FakeProcess())
#        print 'activate_instrument: got ia client %s', self._ia_client
#        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))


        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        self.dataprocessclient.deactivate_data_process(l2_density_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_temperature_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_pressure_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_conductivity_data_process_id)
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)

        #-------------------------------
        # Retrieve the provenance info for the ctd density data product
        #-------------------------------
        provenance_dict = self.dpmsclient.get_data_product_provenance(ctd_l2_density_output_dp_id)
        log.debug("TestDataProductProvenance: provenance_dict  %s", str(provenance_dict))

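        # provenance_dict is keyed by data product id; each entry should carry
        # (at least) a 'producer' list naming the generating data process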
        #validate that products are represented
        self.assertTrue (provenance_dict[str(ctd_l1_conductivity_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l0_conductivity_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l2_density_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l1_temperature_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l0_temperature_output_dp_id)])

        density_dict = (provenance_dict[str(ctd_l2_density_output_dp_id)])
        self.assertEquals(density_dict['producer'], [l2_density_all_data_process_id])


        #-------------------------------
        # Retrieve the extended resource for this data product
        #-------------------------------
        extended_product = self.dpmsclient.get_data_product_extension(ctd_l2_density_output_dp_id)
        self.assertEqual(1, len(extended_product.data_processes) )
        self.assertEqual(3, len(extended_product.process_input_data_products) )
#        log.debug("TestDataProductProvenance: DataProduct provenance_product_list  %s", str(extended_product.provenance_product_list))
#        log.debug("TestDataProductProvenance: DataProduct data_processes  %s", str(extended_product.data_processes))
#        log.debug("TestDataProductProvenance: DataProduct process_input_data_products  %s", str(extended_product.process_input_data_products))
#        log.debug("TestDataProductProvenance: provenance  %s", str(extended_product.computed.provenance.value))

        #-------------------------------
        # Retrieve the extended resource for this data process
        #-------------------------------
        extended_process_def = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)

#        log.debug("TestDataProductProvenance: DataProcess extended_process_def  %s", str(extended_process_def))
#        log.debug("TestDataProductProvenance: DataProcess data_processes  %s", str(extended_process_def.data_processes))
#        log.debug("TestDataProductProvenance: DataProcess data_products  %s", str(extended_process_def.data_products))
        self.assertEqual(1, len(extended_process_def.data_processes) )
        self.assertEqual(3, len(extended_process_def.output_stream_definitions) )
        self.assertEqual(3, len(extended_process_def.data_products) ) # three output products from the single data process

        #-------------------------------
        # Request the xml report
        #-------------------------------
        results = self.dpmsclient.get_data_product_provenance_report(ctd_l2_density_output_dp_id)


        #-------------------------------
        # Cleanup
        #-------------------------------

        self.dpmsclient.delete_data_product(ctd_parsed_data_product)
        self.dpmsclient.delete_data_product(log_data_product_id)
        self.dpmsclient.delete_data_product(ctd_l0_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_salinity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_density_output_dp_id)
Example #48
class TestDeployment(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(
            node=self.container.node)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(
                    RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)

        self.addCleanup(killAllDataProcesses)

    #@unittest.skip("targeting")
    def test_create_deployment(self):

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

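        # planned deployment window, expressed as epoch-second strings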
        start = str(int(time.mktime(datetime.datetime(2013, 1,
                                                      1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start,
                                    end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ",
                  str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite,
                                                  PRED.hasDeployment,
                                                  deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice,
                                                    PRED.hasDeployment,
                                                    deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to read the deleted deployment object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def test_prepare_deployment_support(self):

        deploy_sup = self.omsclient.prepare_deployment_support()
        self.assertTrue(deploy_sup)
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentDevice'].type_,
            "AssocDeploymentInstDevice")
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentDevice'].resources,
            [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentDevice'].
            associated_resources, [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasPlatformDevice'].type_,
            "AssocDeploymentPlatDevice")
        self.assertEquals(
            deploy_sup.associations['DeploymentHasPlatformDevice'].resources,
            [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasPlatformDevice'].
            associated_resources, [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentSite'].type_,
            "AssocDeploymentInstSite")
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentSite'].resources,
            [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentSite'].
            associated_resources, [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasPlatformSite'].type_,
            "AssocDeploymentPlatSite")
        self.assertEquals(
            deploy_sup.associations['DeploymentHasPlatformSite'].resources, [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasPlatformSite'].
            associated_resources, [])

        #create a deployment with metadata and an initial site and device
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='PlatformSite1',
                                      description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site_obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device_obj)

        start = str(int(time.mktime(datetime.datetime(2013, 1,
                                                      1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start,
                                    end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentDevice'].resources,
            [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentDevice'].
            associated_resources, [])
        self.assertEquals(
            len(deploy_sup.associations['DeploymentHasPlatformDevice'].
                resources), 1)
        self.assertEquals(
            deploy_sup.associations['DeploymentHasPlatformDevice'].
            associated_resources, [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentSite'].resources,
            [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentSite'].
            associated_resources, [])
        self.assertEquals(
            len(deploy_sup.associations['DeploymentHasPlatformSite'].resources
                ), 1)
        self.assertEquals(
            deploy_sup.associations['DeploymentHasPlatformSite'].
            associated_resources, [])

        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentDevice'].resources,
            [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentDevice'].
            associated_resources, [])
        self.assertEquals(
            len(deploy_sup.associations['DeploymentHasPlatformDevice'].
                resources), 1)
        self.assertEquals(
            len(deploy_sup.associations['DeploymentHasPlatformDevice'].
                associated_resources), 1)
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentSite'].resources,
            [])
        self.assertEquals(
            deploy_sup.associations['DeploymentHasInstrumentSite'].
            associated_resources, [])
        self.assertEquals(
            len(deploy_sup.associations['DeploymentHasPlatformSite'].resources
                ), 1)
        self.assertEquals(
            len(deploy_sup.associations['DeploymentHasPlatformSite'].
                associated_resources), 1)

        #delete the deployment
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to read the deleted deployment object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def base_activate_deployment(self, make_assigns=False):
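        """Build a platform site/device/model plus a child instrument
        site/device/model and a Deployment with port assignments; when
        make_assigns is True, also wire up the model and deployment
        associations so activation can succeed."""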
        # Create platform site, platform device, platform model

        bounds = GeospatialBounds(geospatial_latitude_limit_north=float(5),
                                  geospatial_latitude_limit_south=float(5),
                                  geospatial_longitude_limit_west=float(15),
                                  geospatial_longitude_limit_east=float(15),
                                  geospatial_vertical_min=float(0),
                                  geospatial_vertical_max=float(1000))

        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='PlatformSite1',
                                      description='test platform site',
                                      constraint_list=[bounds])
        platform_site_id = self.omsclient.create_platform_site(
            platform_site_obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(
            platform_device_obj)

        platform_model_obj = IonObject(RT.PlatformModel,
                                       name='PlatformModel1',
                                       description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(
            platform_model_obj)

        # Create instrument site
        #-------------------------------------------------------------------------------------

        bounds = GeospatialBounds(geospatial_latitude_limit_north=float(45),
                                  geospatial_latitude_limit_south=float(40),
                                  geospatial_longitude_limit_west=float(-75),
                                  geospatial_longitude_limit_east=float(-70),
                                  geospatial_vertical_min=float(0),
                                  geospatial_vertical_max=float(500))

        instrument_site_obj = IonObject(
            RT.InstrumentSite,
            name='InstrumentSite1',
            description='test instrument site',
            reference_designator='GA01SUMO-FI003-01-CTDMO0999',
            constraint_list=[bounds])
        instrument_site_id = self.omsclient.create_instrument_site(
            instrument_site_obj, platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(
            name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        # Create an instrument device
        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(
            instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice,
                                         instrument_device_id)

        pp_obj = IonObject(OT.PlatformPort,
                           reference_designator='GA01SUMO-FI003-01-CTDMO0999',
                           port_type=PortTypeEnum.PAYLOAD,
                           ip_address='1')
        port_assignments = {instrument_device_id: pp_obj}

        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(
            instrument_model_obj)

        # Create a deployment object
        #----------------------------------------------------------------------------------------------------

        start = str(int(time.mktime(datetime.datetime(2013, 1,
                                                      1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2020, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start,
                                    end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   context=IonObject(
                                       OT.CabledNodeDeploymentContext),
                                   port_assignments=port_assignments,
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        if make_assigns:
            self.imsclient.assign_platform_model_to_platform_device(
                platform_model_id, platform_device_id)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instrument_model_id, instrument_device_id)
            self.omsclient.assign_platform_model_to_platform_site(
                platform_model_id, platform_site_id)
            self.omsclient.assign_instrument_model_to_instrument_site(
                instrument_model_id, instrument_site_id)

            self.omsclient.assign_site_to_deployment(platform_site_id,
                                                     deployment_id)
            self.omsclient.assign_device_to_deployment(platform_device_id,
                                                       deployment_id)

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)

        return ret

    def _create_subsequent_deployment(self, prior_dep_info):
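        """Create a second platform/instrument device pair against the
        models from a prior deployment and deploy them to the same site."""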
        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice2',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(
            platform_device_obj)

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice2',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(
            instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice,
                                         instrument_device_id)

        self.imsclient.assign_platform_model_to_platform_device(
            prior_dep_info.platform_model_id, platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(
            prior_dep_info.instrument_model_id, instrument_device_id)

        start = str(int(time.mktime(datetime.datetime(2013, 6,
                                                      1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2020, 6, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start,
                                    end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment2',
                                   description='some new deployment',
                                   context=IonObject(
                                       OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        self.omsclient.assign_site_to_deployment(
            prior_dep_info.platform_site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(
            prior_dep_info.platform_device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        ret = DotDict(instrument_device_id=instrument_device_id,
                      platform_device_id=platform_device_id,
                      deployment_id=deployment_id)

        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):
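        """Activation should update the instrument device's geospatial
        bounds from the site and move the deployment to DEPLOYED;
        deactivation should change the bounds again and return the
        deployment to INTEGRATED."""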

        res = self.base_activate_deployment(make_assigns=True)

        before_activate_instrument_device_obj = self.rrclient.read(
            res.instrument_device_id)
        self.assertNotEquals(before_activate_instrument_device_obj.lcstate,
                             LCS.DEPLOYED)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        def assertGeospatialBoundsEquals(a, b):
            self.assertEquals(a['geospatial_latitude_limit_north'],
                              b['geospatial_latitude_limit_north'])
            self.assertEquals(a['geospatial_latitude_limit_south'],
                              b['geospatial_latitude_limit_south'])
            self.assertEquals(a['geospatial_longitude_limit_west'],
                              b['geospatial_longitude_limit_west'])
            self.assertEquals(a['geospatial_longitude_limit_east'],
                              b['geospatial_longitude_limit_east'])

        def assertGeospatialBoundsNotEquals(a, b):
            self.assertNotEquals(a['geospatial_latitude_limit_north'],
                                 b['geospatial_latitude_limit_north'])
            self.assertNotEquals(a['geospatial_latitude_limit_south'],
                                 b['geospatial_latitude_limit_south'])
            self.assertNotEquals(a['geospatial_longitude_limit_west'],
                                 b['geospatial_longitude_limit_west'])
            self.assertNotEquals(a['geospatial_longitude_limit_east'],
                                 b['geospatial_longitude_limit_east'])

        after_activate_instrument_device_obj = self.rrclient.read(
            res.instrument_device_id)
        assertGeospatialBoundsNotEquals(
            before_activate_instrument_device_obj.geospatial_bounds,
            after_activate_instrument_device_obj.geospatial_bounds)

        deployment_obj = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj.lcstate, LCS.DEPLOYED)

        log.debug("deactivatin deployment, expecting success")
        self.omsclient.deactivate_deployment(res.deployment_id)

        after_deactivate_instrument_device_obj = self.rrclient.read(
            res.instrument_device_id)
        assertGeospatialBoundsNotEquals(
            after_activate_instrument_device_obj.geospatial_bounds,
            after_deactivate_instrument_device_obj.geospatial_bounds)

        deployment_obj = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj.lcstate, LCS.INTEGRATED)

    def test_activate_deployment_redeploy(self):
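        """Activating a subsequent deployment on the same site should
        retire the first one: deployment 1 falls back to INTEGRATED and
        its temporal constraint closes no later than deployment 2's."""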
        dep_util = DeploymentUtil(self.container)
        res = self.base_activate_deployment(make_assigns=True)

        log.debug("activating first deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        deployment_obj1 = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj1.lcstate, LCS.DEPLOYED)

        next_dep_info = self._create_subsequent_deployment(res)

        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertNotEquals(deployment_obj2.lcstate, LCS.DEPLOYED)

        log.debug("activating subsequent deployment, expecting success")
        self.omsclient.activate_deployment(next_dep_info.deployment_id)

        deployment_obj1 = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj1.lcstate, LCS.INTEGRATED)

        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertEquals(deployment_obj2.lcstate, LCS.DEPLOYED)

        dep1_tc = dep_util.get_temporal_constraint(deployment_obj1)
        dep2_tc = dep_util.get_temporal_constraint(deployment_obj2)
        self.assertLessEqual(float(dep1_tc.end_datetime),
                             float(dep2_tc.end_datetime))

        log.debug("deactivating second deployment, expecting success")
        self.omsclient.deactivate_deployment(next_dep_info.deployment_id)

        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertEquals(deployment_obj2.lcstate, LCS.INTEGRATED)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):

        res = self.base_activate_deployment()

        self.omsclient.assign_site_to_deployment(res.platform_site_id,
                                                 res.deployment_id)
        self.omsclient.assign_device_to_deployment(res.platform_device_id,
                                                   res.deployment_id)

        log.debug(
            "activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(
            res.instrument_model_id, res.instrument_site_id)

        log.debug(
            "activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):

        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(
            res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(
            res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.omsclient.assign_device_to_deployment(res.instrument_device_id,
                                                   res.deployment_id)

        log.debug(
            "activating deployment without instrument site, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest)

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(
            res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(
            res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.assign_site_to_deployment(res.instrument_site_id,
                                                 res.deployment_id)

        log.debug("activating deployment without device, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest,
                                "No devices were found in the deployment")

    def assert_deploy_fail(self,
                           deployment_id,
                           err_type=BadRequest,
                           fail_message=""):
        with self.assertRaises(err_type) as cm:
            self.omsclient.activate_deployment(deployment_id)
        # these checks must run outside the with-block: anything after the
        # raising call inside it would never execute
        log.debug("assert_deploy_fail cm: %s", str(cm.exception))
        if fail_message:
            self.assertIn(fail_message, cm.exception.message)

    def test_3x3_matchups_remoteplatform(self):
        self.base_3x3_matchups(IonObject(OT.RemotePlatformDeploymentContext))

    def test_3x3_matchups_cabledinstrument(self):
        self.base_3x3_matchups(IonObject(OT.CabledInstrumentDeploymentContext))

    def test_3x3_matchups_cablednode(self):
        self.base_3x3_matchups(IonObject(OT.CabledNodeDeploymentContext))

    def base_3x3_matchups(self, deployment_context):
        """
        This will be 1 root platform, 3 sub platforms (2 of one model, 1 of another) and 3 sub instruments each (2-to-1)
        """
        deployment_context_type = type(deployment_context).__name__

        instrument_model_id = [
            self.RR2.create(any_old(RT.InstrumentModel)) for _ in range(6)
        ]
        platform_model_id = [
            self.RR2.create(any_old(RT.PlatformModel)) for _ in range(3)
        ]

        instrument_device_id = [
            self.RR2.create(any_old(RT.InstrumentDevice)) for _ in range(9)
        ]
        platform_device_id = [
            self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(4)
        ]

        instrument_site_id = [
            self.RR2.create(
                any_old(
                    RT.InstrumentSite, {
                        "reference_designator":
                        "GA01SUMO-FI003-0%s-CTDMO0999" % (i + 1),
                        "planned_uplink_port":
                        IonObject(
                            OT.PlatformPort,
                            reference_designator="GA01SUMO-FI003-0%s-CTDMO0999"
                            % (i + 1))
                    })) for i in range(9)
        ]

        platform_site_id = [
            self.RR2.create(
                any_old(
                    RT.PlatformSite, {
                        "reference_designator":
                        "GA01SUMO-FI003-0%s-CTDMO0888" % (i + 1),
                        "planned_uplink_port":
                        IonObject(
                            OT.PlatformPort,
                            reference_designator="GA01SUMO-FI003-0%s-CTDMO0888"
                            % (i + 1))
                    })) for i in range(4)
        ]

        def instrument_model_at(platform_idx, instrument_idx):
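            # instrument 0 on platform p uses model 2p; instruments 1 and 2
            # share model 2p+1 (the 2-to-1 mapping from the docstring)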
            m = platform_idx * 2
            if instrument_idx > 0:
                m += 1
            return m

        def platform_model_at(platform_idx):
            if platform_idx > 0:
                return 1
            return 0

        def instrument_at(platform_idx, instrument_idx):
            return platform_idx * 3 + instrument_idx

        # set up the structure
        for p in range(3):
            m = platform_model_at(p)
            self.RR2.assign_platform_model_to_platform_site_with_has_model(
                platform_model_id[m], platform_site_id[p])
            self.RR2.assign_platform_model_to_platform_device_with_has_model(
                platform_model_id[m], platform_device_id[p])
            self.RR2.assign_platform_device_to_platform_device_with_has_device(
                platform_device_id[p], platform_device_id[3])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(
                platform_site_id[p], platform_site_id[3])

            for i in range(3):
                m = instrument_model_at(p, i)
                idx = instrument_at(p, i)
                self.RR2.assign_instrument_model_to_instrument_site_with_has_model(
                    instrument_model_id[m], instrument_site_id[idx])
                self.RR2.assign_instrument_model_to_instrument_device_with_has_model(
                    instrument_model_id[m], instrument_device_id[idx])
                self.RR2.assign_instrument_device_to_platform_device_with_has_device(
                    instrument_device_id[idx], platform_device_id[p])
                self.RR2.assign_instrument_site_to_platform_site_with_has_site(
                    instrument_site_id[idx], platform_site_id[p])

        # top level models
        self.RR2.assign_platform_model_to_platform_device_with_has_model(
            platform_model_id[2], platform_device_id[3])
        self.RR2.assign_platform_model_to_platform_site_with_has_model(
            platform_model_id[2], platform_site_id[3])

        # verify structure
        for p in range(3):
            parent_id = self.RR2.find_platform_device_id_by_platform_device_using_has_device(
                platform_device_id[p])
            self.assertEqual(platform_device_id[3], parent_id)

            parent_id = self.RR2.find_platform_site_id_by_platform_site_using_has_site(
                platform_site_id[p])
            self.assertEqual(platform_site_id[3], parent_id)

        for i in range(len(platform_site_id)):
            self.assertEqual(
                self.RR2.
                find_platform_model_of_platform_device_using_has_model(
                    platform_device_id[i]),
                self.RR2.find_platform_model_of_platform_site_using_has_model(
                    platform_site_id[i]))

        for i in range(len(instrument_site_id)):
            self.assertEqual(
                self.RR2.
                find_instrument_model_of_instrument_device_using_has_model(
                    instrument_device_id[i]),
                self.RR2.
                find_instrument_model_of_instrument_site_using_has_model(
                    instrument_site_id[i]))

        # OOIReferenceDesignator format: GA01SUMO-FI003-03-CTDMO0999  (site-platform_id-port-device_id)

        port_assignments = {}
        for p in range(3):
            ref_desig = "GA01SUMO-FI003-0%s-CTDMO0888" % (p + 1)
            pp_obj = IonObject(OT.PlatformPort,
                               reference_designator=ref_desig,
                               port_type=PortTypeEnum.PAYLOAD,
                               ip_address=str(p))
            port_assignments[platform_device_id[p]] = pp_obj
            for i in range(3):
                ref_desig = "GA01SUMO-FI003-0%s-CTDMO0999" % ((p * 3) + i + 1)
                pp_obj = IonObject(OT.PlatformPort,
                                   reference_designator=ref_desig,
                                   port_type=PortTypeEnum.PAYLOAD,
                                   ip_address=str(p))
                idx = instrument_at(p, i)
                port_assignments[instrument_device_id[idx]] = pp_obj

        deployment_id = self.RR2.create(
            any_old(
                RT.Deployment, {
                    "context": deployment_context,
                    "port_assignments": port_assignments
                }))

        log.debug("assigning device/site to %s deployment",
                  deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(
                deployment_id, platform_device_id[3])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(
                deployment_id, platform_site_id[3])

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_instrument_device_with_has_deployment(
                deployment_id, instrument_device_id[1])
            self.RR2.assign_deployment_to_instrument_site_with_has_deployment(
                deployment_id, instrument_site_id[1])

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(
                deployment_id, platform_device_id[1])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(
                deployment_id, platform_site_id[1])

        log.debug("activation of %s deployment", deployment_context_type)
        self.omsclient.activate_deployment(deployment_id)

        log.debug("validation of %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(
                    d,
                    self.RR2.
                    find_platform_device_id_of_platform_site_using_has_device(
                        platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(
                    d,
                    self.RR2.
                    find_instrument_device_id_of_instrument_site_using_has_device(
                        instrument_site_id[i]))

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.assertEqual(
                instrument_device_id[1],
                self.RR2.
                find_instrument_device_id_of_instrument_site_using_has_device(
                    instrument_site_id[1]))

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            expected_platforms = [1]

            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(
                    i in expected_platforms, d in self.RR2.
                    find_platform_device_ids_of_platform_site_using_has_device(
                        platform_site_id[i]))

class TestResourceRegistry(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2coi.yml")

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient(node=self.container.node)

    def test_crud(self):
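        """End-to-end CRUD against the registry, including schema
        validation, revision conflicts, and owner association checks."""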
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Same check when the fields are passed via a dict
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # And when mixing a dict with keyword arguments
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object not based on most current version"))

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name="user")
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name="instrument")
        iid, _ = self.resource_registry_service.create(inst, headers={"ion-actor-id": str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def test_lifecycle(self):
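        """Walk a resource through lifecycle transitions (DRAFT -> PLANNED
        -> DEVELOPED) and verify that illegal transitions are rejected."""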
        att = IonObject("InstrumentDevice", name="mine", description="desc")

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT_PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN)
        self.assertEquals(new_state, LCS.PLANNED_PRIVATE)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED_PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message
        )

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP)
        self.assertEquals(new_state, LCS.DEVELOPED_PRIVATE)

        self.assertRaises(
            iex.BadRequest,
            self.resource_registry_service.execute_lifecycle_transition,
            resource_id=rid,
            transition_event="NONE##",
        )

        self.resource_registry_service.set_lifecycle_state(rid, LCS.INTEGRATED_PRIVATE)
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED_PRIVATE)

    def test_association(self):
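        """Exercise association create/find/get/delete, including the
        validation errors for missing or mistyped subjects and objects."""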
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "bogustype"
            )
        self.assertTrue(cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Subject id or rev not available")

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id or rev not available")

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id
            )
        self.assertTrue(cm.exception.message == "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id
        )

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True
        )
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True
        )
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Create a similar association to a specific revision
        # TODO: This is not a supported case so far
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R"
        )

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False
        )
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj
        )
        ret2 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None, True
        )
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True
        )
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True
        )
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True
        )
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Delete one of the associations
        self.resource_registry_service.delete_association(assoc_id2)

        assoc = self.resource_registry_service.get_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def test_find_resources(self):
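        """find_resources by name and by lcstate, including rejection of a
        combined name + lcstate query."""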
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("InstrumentDevice", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentDevice, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentDevice, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

    def test_attach(self):
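        """Create BLOB and ASCII attachments on a resource, then list,
        limit, order, and delete them."""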

        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Owner creation tests
        instrument = IonObject("InstrumentDevice", name="instrument")
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1)
        self.assertEquals(binary, att1.content)

        import base64
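        # Note: encodestring/decodestring are the Python 2 names; Python 3 renamed
        # them to base64.encodebytes/decodebytes.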

        att = Attachment(content=base64.encodestring(binary), attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid, id_only=False, limit=1)
        self.assertEquals(atts[0].content, att1.content)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [])


class TestObservatoryManagementFullIntegration(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.RR = ResourceRegistryServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient()
        self.org_management_service = OrgManagementServiceClient()
        self.IMS = InstrumentManagementServiceClient()
        self.dpclient = DataProductManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

        self._load_stage = 0
        self._resources = {}

    def preload_ooi(self, stage=STAGE_LOAD_ASSETS):
        """Preload OOI resources up to (and including) the given stage."""

        if self._load_stage >= stage:
            return

        if self._load_stage < STAGE_LOAD_ORGS:
            log.info("--------------------------------------------------------------------------------------------------------")
            log.info("Preloading stage: %s (OOIR2 Orgs, users, roles)", STAGE_LOAD_ORGS)
            # load_OOIR2_scenario
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
                op="load",
                scenario="OOIR2",
                path="master",
                ))
            self._load_stage = STAGE_LOAD_ORGS

        if self._load_stage < STAGE_LOAD_PARAMS:
            log.info("--------------------------------------------------------------------------------------------------------")
            log.info("Preloading stage: %s (BASE params, streamdefs)", STAGE_LOAD_PARAMS)
            # load_parameter_scenarios
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
                op="load",
                scenario="BETA",
                path="master",
                categories="ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
                clearcols="owner_id,org_ids",
                assets="res/preload/r2_ioc/ooi_assets",
                parseooi="True",
                ))
            self._load_stage = STAGE_LOAD_PARAMS

        if self._load_stage < STAGE_LOAD_AGENTS:
            log.info("--------------------------------------------------------------------------------------------------------")
            log.info("Preloading stage: %s (OOIR2_I agents, model links)", STAGE_LOAD_AGENTS)
            # load_OOIR2_agents
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
                op="load",
                scenario="OOIR2_I",
                path="master",
                ))
            self._load_stage = STAGE_LOAD_AGENTS

        if self._load_stage < STAGE_LOAD_ASSETS:
            log.info("--------------------------------------------------------------------------------------------------------")
            log.info("Preloading stage: %s (OOI assets linked to params, agents)", STAGE_LOAD_ASSETS)
            # load_ooi_assets
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
                op="load",
                loadooi="True",
                path="master",
                assets="res/preload/r2_ioc/ooi_assets",
                bulk="True",
                debug="True",
                ooiuntil="9/1/2013",
                ooiparams="True",
                #excludecategories: DataProduct,DataProductLink,Deployment,Workflow,WorkflowDefinition
                ))
            self._load_stage = STAGE_LOAD_ASSETS


    @unittest.skip('Work in progress')
    def test_observatory(self):
        self._load_stage = 0
        self._resources = {}
        passing = True


        # LOAD STEP 1
        self.preload_ooi(stage=STAGE_LOAD_ORGS)

        passing &= self.orguserrole_assertions()


        # LOAD STEP 2
        self.preload_ooi(stage=STAGE_LOAD_PARAMS)

        passing &= self.parameter_assertions()


        # LOAD STEP 3
        self.preload_ooi(stage=STAGE_LOAD_AGENTS)

        passing &= self.agent_assertions()


        # LOAD STEP 4
        self.preload_ooi(stage=STAGE_LOAD_ASSETS)

        # Check OOI preloaded resources to see if they match needs for this test and for correctness
        passing &= self.sites_assertions()
        passing &= self.device_assertions()
        passing &= self.deployment_assertions()

        # Extensive tests on select RSN nodes
        passing &= self.rsn_node_checks()

        # Extensive tests on select RSN instruments
        passing &= self.check_rsn_instrument()

        passing &= self.check_rsn_instrument_data_product()

        # Extensive tests on a glider
        #passing &= self.check_glider()

        # Extensive tests on a CG assembly
        #passing &= self.check_cg_assembly()



        # Add a new instrument agent
        # Add a new instrument agent instance
        # Check DataProducts
        # Check Provenance

        # Use the unwrapped base-class assertTrue so that an accumulated failure
        # actually fails the test (the wrapped assertTrue defined at the bottom of
        # this class only records and returns a boolean).
        IonIntegrationTestCase.assertTrue(self, passing)


    # -------------------------------------------------------------------------

    def orguserrole_assertions(self):
        passing = True

        passing &= self._check_marine_facility("MF_CGSN")
        passing &= self._check_marine_facility("MF_RSN")
        passing &= self._check_marine_facility("MF_EA")

        return passing

    def _check_marine_facility(self, preload_id):
        passing = True
        log.debug("Checking marine facility %s and associations", preload_id)

        mf_obj = self.retrieve_ooi_asset(preload_id)
        mf_id = mf_obj._id
        self._resources[preload_id] = mf_id

        passing &= self.assertEquals(mf_obj.lcstate, LCS.DEPLOYED)

        res_list, _ = self.RR.find_objects(subject=mf_id, predicate=PRED.hasMembership, id_only=True)
        passing &= self.assertTrue(len(res_list) >= 3)

        res_list, _ = self.RR.find_objects(subject=mf_id, predicate=PRED.hasRole, id_only=False)
        passing &= self.assertTrue(len(res_list) >= 5)

        passing &= self._check_role_assignments(res_list, "ORG_MANAGER")
        passing &= self._check_role_assignments(res_list, "OBSERVATORY_OPERATOR")
        passing &= self._check_role_assignments(res_list, "INSTRUMENT_OPERATOR")

        return passing

    def _check_role_assignments(self, role_list, role_name):
        passing = True
        role_obj = self._find_resource_in_list(role_list, "governance_name", role_name)
        if role_obj:
            res_list, _ = self.RR.find_subjects(predicate=PRED.hasRole, object=role_obj._id, id_only=True)
            passing &= self.assertTrue(len(res_list) >= 1)

        return passing


    def parameter_assertions(self):
        passing = True

        pctx_list, _ = self.RR.find_resources_ext(restype=RT.ParameterContext)
        passing &= self.assertTrue(len(pctx_list) >= 10)

        pdict_list, _ = self.RR.find_resources_ext(restype=RT.ParameterDictionary)
        passing &= self.assertTrue(len(pdict_list) >= 10)

        sdef_list, _ = self.RR.find_resources_ext(restype=RT.StreamDefinition)
        passing &= self.assertTrue(len(sdef_list) >= 10)

        # Verify that a PDict has the appropriate QC parameters defined
        pdicts, _ = self.RR.find_resources_ext(restype=RT.ParameterDictionary, alt_id_ns='PRE', alt_id='DICT110')
        passing &= self.assertTrue(len(pdicts)==1)
        if not pdicts:
            return passing
        pdict = pdicts[0]

        # According to the latest SAF, density should NOT have trend

        parameters, _ = self.RR.find_objects(pdict, PRED.hasParameterContext)
        names = [i.name for i in parameters if i.name.startswith('density')]
        passing &= self.assertTrue('density_trndtst_qc' not in names)

        return passing

    def agent_assertions(self):
        passing = True

        # TODO: More tests?

        return passing

    def sites_assertions(self):
        passing = True
        observatory_list, _ = self.RR.find_resources_ext(restype=RT.Observatory)
        passing &= self.assertTrue(len(observatory_list) >= 40)
        for obs in observatory_list:
            passing &= self.assertEquals(obs.lcstate, LCS.DEPLOYED)

        platform_site_list, _ = self.RR.find_resources(RT.PlatformSite, id_only=False)
        log.debug('platform sites: %s', [ps.name for ps in platform_site_list])
        passing &= self.assertTrue(len(platform_site_list) >= 30)

        return passing

    def device_assertions(self):
        passing = True
        platform_device_list, _ = self.RR.find_resources(RT.PlatformDevice, id_only=False)
        passing &= self.assertTrue(len(platform_device_list) >= 30)
        for pdev in platform_device_list:
            log.debug('platform device: %s', pdev.name)
            passing &= self.assertEquals(pdev.lcstate, LCS.PLANNED)

        platform_agent_list, _ = self.RR.find_resources(RT.PlatformAgent, id_only=False)
        passing &= self.assertTrue(len(platform_agent_list) >= 2)
        for pagent in platform_agent_list:
            log.debug('platform agent: %s', pagent.name)
            passing &= self.assertEquals(pagent.lcstate, LCS.DEPLOYED)

        instrument_agent_list, _ = self.RR.find_resources(RT.InstrumentAgent, id_only=False)
        passing &= self.assertTrue(len(instrument_agent_list) >= 3)
        for iagent in instrument_agent_list:
            log.debug('instrument agent: %s', iagent.name)
            passing &= self.assertEquals(iagent.lcstate, LCS.DEPLOYED)

            model_list, _ = self.RR.find_objects(subject=iagent._id, predicate=PRED.hasModel, id_only=True)
            passing &= self.assertTrue(len(model_list) >= 1, "IA %s" % iagent.name)

        return passing

    def deployment_assertions(self):
        passing = True
        deployment_list, _ = self.RR.find_resources(RT.Deployment, id_only=False)
        passing &= self.assertTrue(len(deployment_list) >= 30)
        for deploy in deployment_list:
            log.debug('deployment: %s', deploy.name)
            passing &= self.assertEquals(deploy.lcstate, LCS.DEPLOYED)
        return passing

    def rsn_node_checks(self):
        """
        Current preload creates:
        - PlatformDevice in PLANNED
        - PlatformSite in DEPLOYED
        - Deployment in DEPLOYED
        - Deployment is NOT activated
        """
        passing = True

        dp_obj = self.retrieve_ooi_asset("CE04OSHY-PN01C_DEP")

        passing &= self.assertEquals(dp_obj.lcstate, LCS.DEPLOYED)
        passing &= self.assertEquals(dp_obj.availability, AS.AVAILABLE)
        log.debug('test_observatory  retrieve CE04OSHY-PN01C_DEP deployment:  %s', dp_obj)

        # Check existing RSN node CE04OSHY-LV01C Deployment (DEPLOYED lcstate)
        CE04OSHY_LV01C_deployment = self.retrieve_ooi_asset('CE04OSHY-LV01C_DEP')
        passing &= self.assertEquals(CE04OSHY_LV01C_deployment.lcstate, LCS.DEPLOYED)
        passing &= self.assertEquals(CE04OSHY_LV01C_deployment.availability, AS.AVAILABLE)

        #self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        log.debug('test_observatory  retrieve RSN node CE04OSHY-LV01C Deployment:  %s', CE04OSHY_LV01C_deployment)

        CE04OSHY_LV01C_device = self.retrieve_ooi_asset('CE04OSHY-LV01C_PD')

        # Set CE04OSHY-LV01C device to DEVELOPED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_device._id, new_lcs_state=LCE.DEVELOP, verify=LCS.DEVELOPED)

        # Set CE04OSHY-LV01C device to INTEGRATED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_device._id, new_lcs_state=LCE.INTEGRATE, verify=LCS.INTEGRATED)

        # Set CE04OSHY-LV01C device to DEPLOYED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_device._id, new_lcs_state=LCE.DEPLOY, verify=LCS.DEPLOYED)

        # Set CE04OSHY-LV01C Deployment to DEPLOYED state
        # NOTE: Deployments are created in DEPLOYED state, currently
        #self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_deployment._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')
        
        # Activate Deployment for CE04OSHY-LV01C
        self.OMS.activate_deployment(CE04OSHY_LV01C_deployment._id)
        log.debug('---------    activate_deployment CE04OSHY_LV01C_deployment -------------- ')
        self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        passing &= self.validate_deployment_activated(CE04OSHY_LV01C_deployment._id)
        
        # (optional) Start CE04OSHY-LV01C platform agent with simulator

        # NOTE: DataProduct is generated in DEPLOYED state
        # # Set DataProduct for CE04OSHY-LV01C platform to DEPLOYED state
        # output_data_product_ids, assns =self.RR.find_objects(subject=CE04OSHY_LV01C_device._id, predicate=PRED.hasOutputProduct, id_only=True)
        # if output_data_product_ids:
        #     #self.assertEquals(len(child_devs), 3)
        #     for output_data_product_id in output_data_product_ids:
        #         log.debug('DataProduct for CE04OSHY-LV01C platform:  %s', output_data_product_id)
        #         self.transition_lcs_then_verify(resource_id=output_data_product_id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Check events for CE04OSHY-LV01C platform

        # Check existing CE04OSBP-LJ01C Deployment (PLANNED lcstate)
#        dp_list, _  = self.RR.find_resources_ext(alt_id_ns="PRE", alt_id="CE04OSBP-LJ01C_DEP")
#        self.assertEquals(len(dp_list), 1)
#        CE04OSHY_LV01C_deployment = dp_list[0]
#        self.assertEquals(CE04OSHY_LV01C_deployment.lcstate, 'PLANNED')
#        log.debug('test_observatory  retrieve RSN node CE04OSBP-LJ01C Deployment:  %s', CE04OSHY_LV01C_deployment)


        # Set CE04OSBP-LJ01C Deployment to DEPLOYED state

        # Update description and other attributes for CE04OSBP-LJ01C device resource

        # Create attachment (JPG image) for CE04OSBP-LJ01C device resource

        # Activate Deployment for CE04OSBP-LJ01C

        # (optional) Add/register CE04OSBP-LJ01C platform agent to parent agent

        # (optional) Start CE04OSBP-LJ01C platform agent

        return passing
    
    def check_rsn_instrument(self):
        """
        Check existing RSN instrument CE04OSBP-LJ01C-06-CTDBPO108 Deployment.
        Current preload creates:
        - InstrumentDevice in PLANNED
        - InstrumentSite in DEPLOYED
        - Deployment in DEPLOYED
        - Deployment is activated
        """

        passing = True
        CE04OSBP_LJ01C_06_CTDBPO108_deploy = self.retrieve_ooi_asset('CE04OSBP-LJ01C-06-CTDBPO108_DEP')
        self.dump_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        #passing &= self.assertEquals(CE04OSBP_LJ01C_06_CTDBPO108_deploy.lcstate, 'PLANNED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEVELOPED state
        CE04OSBP_LJ01C_06_CTDBPO108_device = self.retrieve_ooi_asset('CE04OSBP-LJ01C-06-CTDBPO108_ID')
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEVELOP, verify='DEVELOPED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 Deployment to DEPLOYED state
        #self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for CE04OSBP-LJ01C-06-CTDBPO108 instrument
        log.debug('---------    activate_deployment CE04OSBP-LJ01C-06-CTDBPO108 deployment -------------- ')
        self.OMS.activate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        passing &= self.validate_deployment_activated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)

        # (optional) Add/register CE04OSBP-LJ01C-06-CTDBPO108 instrument agent to parent agent

        # (optional) Start CE04OSBP-LJ01C-06-CTDBPO108 instrument agent with simulator

        # Set all DataProducts for CE04OSBP-LJ01C-06-CTDBPO108 to DEPLOYED state


        # (optional) Create a substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device
        CE04OSBP_LJ01C_06_CTDBPO108_isite = self.retrieve_ooi_asset('CE04OSBP-LJ01C-06-CTDBPO108')

        ## create device here: retrieve CTD Mooring on Mooring Riser 001 - similar?
        GP03FLMB_RI001_10_CTDMOG999_ID_idevice = self.retrieve_ooi_asset('GP03FLMB-RI001-10-CTDMOG999_ID')

        deploy_id_2 = self.create_basic_deployment(name='CE04OSBP-LJ01C-06-CTDBPO108_DEP2', description='substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device')
        self.IMS.deploy_instrument_device(instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id, deployment_id=deploy_id_2)
        self.OMS.deploy_instrument_site(instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id, deployment_id=deploy_id_2)
        self.dump_deployment(deploy_id_2)

        # (optional) Activate this second deployment - check first deployment is deactivated
        self.OMS.deactivate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        passing &= self.validate_deployment_deactivated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)


        # log.debug('Activate deployment deploy_id_2')
        # self.get_deployment_ids(deploy_id_2)
        # self.dump_deployment(deploy_id_2, "deploy_id_2")
        # self.OMS.activate_deployment(deploy_id_2)
        # passing &= self.validate_deployment_deactivated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        #
        # # (optional) Set first CE04OSBP-LJ01C-06-CTDBPO108 Deployment to INTEGRATED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')
        #
        # # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')
        #
        #
        # # (optional) Create a third Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a same device from first deployment
        # deploy_id_3 = self.create_basic_deployment(name='CE04OSBP-LJ01C-06-CTDBPO108_DEP3', description='substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with same device as first')
        # self.IMS.deploy_instrument_device(instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id, deployment_id=deploy_id_3)
        # self.OMS.deploy_instrument_site(instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id, deployment_id=deploy_id_3)
        # self.dump_deployment(deploy_id_3)
        #
        #
        # # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')
        #
        # # (optional) Activate this third deployment - check second deployment is deactivated
        # log.debug('Activate deployment deploy_id_3')
        # self.dump_deployment(deploy_id_3)
        # self.OMS.activate_deployment(deploy_id_3)
        # #todo: check second deployment is deactivated

        return passing




    def check_data_product_reference(self, reference_designator, output=None):
        # Use None instead of a mutable default argument; callers pass a list to
        # collect (data_product_id, stream_def_id, dataset_id) tuples.
        if output is None:
            output = []
        passing = True

        data_product_ids, _ = self.RR.find_resources_ext(alt_id_ns='PRE', alt_id='%s_DPI1' % reference_designator, id_only=True) # Assuming DPI1 is parsed
        passing &= self.assertEquals(len(data_product_ids), 1)

        if not data_product_ids:
            return passing

        # Let's go ahead and activate it
        data_product_id = data_product_ids[0]
        self.dpclient.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.dpclient.suspend_data_product_persistence, data_product_id)

        dataset_ids, _ = self.RR.find_objects(data_product_id, PRED.hasDataset, id_only=True)
        passing &= self.assertEquals(len(dataset_ids), 1)
        if not dataset_ids:
            return passing
        dataset_id = dataset_ids[0]

        stream_def_ids, _ = self.RR.find_objects(data_product_id, PRED.hasStreamDefinition, id_only=True)
        passing &= self.assertEquals(len(stream_def_ids), 1)
        if not stream_def_ids:
            return passing
        stream_def_id = stream_def_ids[0]
        output.append((data_product_id, stream_def_id, dataset_id))
        return passing

    def check_tempsf_instrument_data_product(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing: return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
        ntp_now = now + 2208988800  # seconds between the NTP epoch (1900) and the Unix epoch (1970)

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['temperature'] = [[ 25.3884, 26.9384, 24.3394, 23.3401, 22.9832,
            29.4434, 26.9873, 15.2883, 16.3374, 14.5883, 15.7253, 18.4383,
            15.3488, 17.2993, 10.2111, 11.5993, 10.9345, 9.4444, 9.9876,
            10.9834, 11.0098, 5.3456, 4.2994, 4.3009]]

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.event.wait(20))
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['temperature'], [[
            25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434, 26.9873,
            15.2883, 16.3374, 14.5883, 15.7253, 18.4383, 15.3488, 17.2993,
            10.2111, 11.5993, 10.9345, 9.4444, 9.9876, 10.9834, 11.0098,
            5.3456, 4.2994, 4.3009]])
        return passing
    
    def check_trhph_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing:
            return passing

        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(stream_def_id)
        passing &= self.assertEquals(pdict.name, 'trhph_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)

        # calibration constants
        a = 1.98e-9
        b = -2.45e-6
        c = 9.28e-4
        d = -0.0888
        e = 0.731

        V_s = 1.506
        V_c = 0.
        T = 11.8

        r1 = 0.906
        r2 = 4.095
        r3 = 4.095

        ORP_V = 1.806
        Cl = np.nan

        offset = 2008
        gain = 4.0
        # Normally this would be 50 per the DPS but the precision is %4.0f which truncates the values to the nearest 1...
        ORP = ((ORP_V * 1000.) - offset) / gain
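        # Worked example with the inputs above: ((1.806 * 1000.) - 2008) / 4.0 = -202 / 4.0 = -50.5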

        ntp_now = time.time() + 2208988800

        rdt['cc_a'] = [a]
        rdt['cc_b'] = [b]
        rdt['cc_c'] = [c]
        rdt['cc_d'] = [d]
        rdt['cc_e'] = [e]
        rdt['ref_temp_volts'] = [V_s]
        rdt['resistivity_temp_volts'] = [V_c]
        rdt['eh_sensor'] = [ORP_V]
        rdt['resistivity_5'] = [r1]
        rdt['resistivity_x1'] = [r2]
        rdt['resistivity_x5'] = [r3]
        rdt['cc_offset'] = [offset]
        rdt['cc_gain'] = [gain]
        rdt['time'] = [ntp_now]

        # Note: 'vent_fluid_temperaure' (sic) appears to be the parameter name as
        # defined in the preload parameter dictionary, so it is left as-is.
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'], [T], 2)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP], 4)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.event.wait(60))
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'], [T], 2)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP], 4)

        return passing

    def check_vel3d_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(stream_def_id)
        passing &= self.assertEquals(pdict.name, 'vel3d_b_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        lat = 14.6846
        lon = -51.044
        ts = np.array([3319563600, 3319567200, 3319570800, 3319574400,
            3319578000, 3319581600, 3319585200, 3319588800, 3319592400,
            3319596000], dtype=np.float)

        ve = np.array([ -3.2,  0.1,  0. ,  2.3, -0.1,  5.6,  5.1,  5.8,
            8.8, 10.3])

        vn = np.array([ 18.2,  9.9, 12. ,  6.6, 7.4,  3.4, -2.6,  0.2,
            -1.5,  4.1])
        vu = np.array([-1.1, -0.6, -1.4, -2, -1.7, -2, 1.3, -1.6, -1.1, -4.5])
        ve_expected = np.array([-0.085136, -0.028752, -0.036007, 0.002136,
            -0.023158, 0.043218, 0.056451, 0.054727, 0.088446, 0.085952])
        vn_expected = np.array([ 0.164012,  0.094738,  0.114471,  0.06986,  0.07029,
                    0.049237, -0.009499,  0.019311,  0.012096,  0.070017])
        vu_expected = np.array([-0.011, -0.006, -0.014, -0.02, -0.017, -0.02,
            0.013, -0.016, -0.011, -0.045])

        
        rdt['time'] = ts
        rdt['lat'] = [lat] * 10
        rdt['lon'] = [lon] * 10
        rdt['turbulent_velocity_east'] = ve
        rdt['turbulent_velocity_north'] = vn
        rdt['turbulent_velocity_up'] = vu

        passing &= self.assert_array_almost_equal(rdt['eastward_turbulent_velocity'],
                ve_expected)
        passing &= self.assert_array_almost_equal(rdt['northward_turbulent_velocity'],
                vn_expected)
        passing &= self.assert_array_almost_equal(rdt['upward_turbulent_velocity'],
                vu_expected)


        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.event.wait(20))
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['eastward_turbulent_velocity'],
                ve_expected)
        passing &= self.assert_array_almost_equal(rdt['northward_turbulent_velocity'],
                vn_expected)
        passing &= self.assert_array_almost_equal(rdt['upward_turbulent_velocity'],
                vu_expected)
        return passing

    
    def check_presta_instrument_data_products(self, reference_designator):
        # Check the parsed data product: make sure it's got everything it needs
        # and can be published, persisted, etc.

        # Absolute Pressure (SFLPRES_L0) is what comes off the instrument; SFLPRES_L1 is a pfunc.
        # Let's go ahead and publish some fake data!!!
        # According to https://alfresco.oceanobservatories.org/alfresco/d/d/workspace/SpacesStore/63e16865-9d9e-4b11-b0b3-d5658faa5080/1341-00230_Data_Product_Spec_SFLPRES_OOI.pdf
        # Appendix A, Example 1:
        #   p_psia_tide = 14.8670  ->  the tide should be 10.2504
        passing = True

        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
        ntp_now = now + 2208988800.

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['absolute_pressure'] = [14.8670]
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'], [10.2504], 4)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.event.wait(20)) # Bumped to 20 to keep buildbot happy
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)

        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'], [10.2504], 4)
        passing &= self.assert_array_almost_equal(rdt['absolute_pressure'], [14.8670], 4)

        return passing

    def check_rsn_instrument_data_product(self):
        passing = True
        # For RS03AXBS-MJ03A-06-PRESTA301 (PREST-A) there are a few listed data
        # products: Parsed, Engineering, SFLPRES_L0 and SFLPRES_L1.
        # Check for the two data products and make sure they have the proper parameters:
        # SFLPRES_L0 should only expose SFLPRES parameters (verified below).
        data_products, _ = self.RR.find_resources_ext(alt_id_ns='PRE', alt_id='RS03AXBS-MJ03A-06-PRESTA301_SFLPRES_L0_DPID', id_only=True)
        passing &= self.assertTrue(len(data_products) == 1)
        if not data_products:
            return passing

        data_product_id = data_products[0]
        
        stream_defs, _ = self.RR.find_objects(data_product_id, PRED.hasStreamDefinition, id_only=False)
        passing &= self.assertTrue(len(stream_defs) == 1)
        if not stream_defs:
            return passing

        # Assert that the stream definition has the correct reference designator
        stream_def = stream_defs[0]
        passing &= self.assertEquals(stream_def.stream_configuration['reference_designator'], 'RS03AXBS-MJ03A-06-PRESTA301')

        # Get the pdict and make sure that the parameters corresponding to the available fields 
        # begin with the appropriate data product identifier

        pdict_ids, _ = self.RR.find_objects(stream_def, PRED.hasParameterDictionary, id_only=True)
        passing &= self.assertEquals(len(pdict_ids), 1)
        if not pdict_ids:
            return passing

        pdict_id = pdict_ids[0]
        
        pdict = DatasetManagementService.get_parameter_dictionary(pdict_id)
        available_params = [pdict.get_context(i) for i in pdict.keys() if i in stream_def.available_fields]
        for p in available_params:
            if p.name=='time': # Ignore the domain parameter
                continue
            passing &= self.assertTrue(p.ooi_short_name.startswith('SFLPRES'))
        passing &= self.check_presta_instrument_data_products('RS01SLBS-MJ01A-06-PRESTA101')
        passing &= self.check_vel3d_instrument_data_products( 'RS01SLBS-MJ01A-12-VEL3DB101')
        passing &= self.check_presta_instrument_data_products('RS03AXBS-MJ03A-06-PRESTA301')
        passing &= self.check_vel3d_instrument_data_products( 'RS03AXBS-MJ03A-12-VEL3DB301')
        passing &= self.check_tempsf_instrument_data_product( 'RS03ASHS-MJ03B-07-TMPSFA301')
        passing &= self.check_vel3d_instrument_data_products( 'RS03INT2-MJ03D-12-VEL3DB304')
        passing &= self.check_trhph_instrument_data_products( 'RS03INT1-MJ03C-10-TRHPHA301')

        self.data_product_management.activate_data_product_persistence(data_product_id)
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'], [10.2504], 4)
        passing &= self.assert_array_almost_equal(rdt['absolute_pressure'], [14.8670], 4)
        self.data_product_management.suspend_data_product_persistence(data_product_id) # Should do nothing and not raise anything

        
        return passing


    def check_glider(self):
        '''
        Check that glider GP05MOAS-GL001 assembly is defined by OOI preload (3 instruments)
        '''
        passing = True
        GP05MOAS_GL001_device = self.retrieve_ooi_asset('GP05MOAS-GL001_PD')
        child_devs, assns = self.RR.find_objects(subject=GP05MOAS_GL001_device._id, predicate=PRED.hasDevice, id_only=True)
        passing &= self.assertEquals(len(child_devs), 3)

        # Set GP05MOAS-GL001 Deployment to DEPLOYED
        GP05MOAS_GL001_deploy = self.retrieve_ooi_asset('GP05MOAS-GL001_DEP')
        passing &= self.transition_lcs_then_verify(resource_id=GP05MOAS_GL001_deploy._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(GP05MOAS_GL001_deploy._id)

        # Deactivate Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(GP05MOAS_GL001_deploy._id)


        # Create a new Deployment resource X without any assignment
        x_deploy_id = self.create_basic_deployment(name='X_Deployment', description='new Deployment resource X without any assignment')

        # Assign Deployment X to site GP05MOAS-GL001
        GP05MOAS_GL001_psite = self.retrieve_ooi_asset('GP05MOAS-GL001')
        self.OMS.deploy_platform_site(GP05MOAS_GL001_psite._id, x_deploy_id)

        # Assign Deployment X to first device for GP05MOAS-GL001
        GP05MOAS_GL001_device = self.retrieve_ooi_asset('GP05MOAS-GL001_PD')
        self.IMS.deploy_platform_device(GP05MOAS_GL001_device._id, x_deploy_id)

        # Set GP05MOAS-GL001 Deployment to PLANNED state
        #self.transition_lcs_then_verify(resource_id=x_deploy_id, new_lcs_state=LCE.PLAN, verify='PLANNED')
        # ??? already in planned

        # Set second GP05MOAS-GL001 Deployment to DEPLOYED
        passing &= self.transition_lcs_then_verify(resource_id=x_deploy_id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')
        self.dump_deployment(x_deploy_id)

        # Activate second Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(x_deploy_id)

        # Deactivate second Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(x_deploy_id)
        return passing


    def check_cg_assembly(self):
        passing = True

        # Set several CE01ISSM-RI002-* instrument devices to DEVELOPED state

        # Assemble several CE01ISSM-RI002-* instruments to a CG CE01ISSM-RI002 component platform

        # Set several CE01ISSM-RI002-* instrument devices to INTEGRATED state

        # Assemble CE01ISSM-RI002 platform to CG CE01ISSM-LM001 station platform

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to DEPLOYED state (children maybe too?)

        # Set CE01ISSM-LM001 Deployment to DEPLOYED

        # Activate CE01ISSM-LM001 platform assembly deployment


        # Deactivate CE01ISSM-LM001 platform assembly deployment

        # Set CE01ISSM-LM001 Deployment to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Disassemble CE01ISSM-RI002 platform from CG CE01ISSM-LM001 station platform

        # Disassemble all CE01ISSM-RI002-* instruments from a CG CE01ISSM-RI002 component platform


        # Retire instrument one for CE01ISSM-RI002-*

        # Retire device one for CE01ISSM-RI002

        # Retire device one for CE01ISSM-LM001

        return passing

    # -------------------------------------------------------------------------

    def retrieve_ooi_asset(self, alt_id='', namespace='PRE'):
        dp_list, _  = self.RR.find_resources_ext(alt_id_ns=namespace, alt_id=alt_id)
        self.assertEquals(len(dp_list), 1)
        return dp_list[0]

    def transition_lcs_then_verify(self, resource_id, new_lcs_state, verify):
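        # advance_lcs fires the lifecycle event; we then re-read the resource and
        # verify the resulting lcstate. The wrapped assertEquals returns a boolean,
        # so callers can accumulate the result into `passing`.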
        ret = self.RR2.advance_lcs(resource_id, new_lcs_state)
        resource_obj = self.RR.read(resource_id)
        return self.assertEquals(resource_obj.lcstate, verify)

    def create_basic_deployment(self, name='', description=''):
        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
            name=name,
            description=description,
            context=IonObject(OT.CabledNodeDeploymentContext),
            constraint_list=[temporal_bounds])
        return self.OMS.create_deployment(deployment_obj)

    def validate_deployment_activated(self, deployment_id=''):
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id, predicate=PRED.hasDevice, object=device_id)
        return self.assertEquals(len(assocs), 1)

    def validate_deployment_deactivated(self, deployment_id=''):
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id, predicate=PRED.hasDevice, object=device_id)
        return self.assertEquals(len(assocs), 0)
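
    # Note: activating a deployment is expected to create the site -> device
    # hasDevice association that the two validators above look for; deactivating
    # it removes that association again.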

    def dump_deployment(self, deployment_id='', name=""):
        #site_id, device_id = self.get_deployment_ids(deployment_id)
        resource_list, _ = self.RR.find_subjects(predicate=PRED.hasDeployment, object=deployment_id, id_only=True)
        resource_list.append(deployment_id)
        resources = self.RR.read_mult(resource_list)
        log.debug('---------   dump_deployment %s summary---------------', name)
        for resource in resources:
            log.debug('%s: %s (%s)', resource._get_type(), resource.name, resource._id)

        log.debug('---------   dump_deployment %s full dump ---------------', name)

        for resource in resources:
            log.debug('resource: %s ', resource)
        log.debug('---------   dump_deployment %s end  ---------------', name)


#        assocs = self.container.resource_registry.find_associations(anyside=deployment_id)
#        log.debug('---------   dump_deployment associations ---------------')
#        for assoc in assocs:
#            log.debug('SUBJECT: %s   PREDICATE: %s   OBJECT: %s', assoc.s, assoc.p, assoc.o)
#        log.debug('---------   dump_deployment associations end ---------------')


    def get_deployment_ids(self, deployment_id=''):
        idevice_list,_ = self.RR.find_subjects(RT.InstrumentDevice, PRED.hasDeployment, deployment_id, id_only=True)
        pdevice_list,_ = self.RR.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, id_only=True)
        devices = idevice_list + pdevice_list
        self.assertEquals(1, len(devices))
        isite_list,_ = self.RR.find_subjects(RT.InstrumentSite, PRED.hasDeployment, deployment_id, id_only=True)
        psite_list,_ = self.RR.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, id_only=True)
        sites = isite_list + psite_list
        self.assertEquals(1, len(sites))
        return sites[0], devices[0]

    def _find_resource_in_list(self, res_list, attr, attr_val, assert_found=True):
        for res in res_list:
            v = getattr(res, attr, None)
            if v == attr_val:
                return res
        if assert_found:
            self.assertTrue(False, "Attribute %s value %s not found in list" % (attr, attr_val))
        return None

    # -------------------------------------------------------------------------

    def _get_caller(self):
        # Two frames up the stack is the test method that invoked the wrapped
        # assertion; report it as "file:line".
        s = inspect.stack()
        return "%s:%s" % (s[2][1], s[2][2])

    @assertion_wrapper
    def assert_array_almost_equal(self, *args, **kwargs):
        np.testing.assert_array_almost_equal(*args, **kwargs)

    @assertion_wrapper
    def assertEquals(self, *args, **kwargs):
        IonIntegrationTestCase.assertEquals(self, *args, **kwargs)

    @assertion_wrapper
    def assertTrue(self, *args, **kwargs):
        IonIntegrationTestCase.assertTrue(self, *args, **kwargs)
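

# A minimal sketch (an assumption -- assertion_wrapper is defined elsewhere) of
# what the decorator used above is presumed to do: convert a raising unittest
# assertion into a boolean result, so checks can be accumulated with
# `passing &= ...` instead of aborting the test on the first failure.
#
#     def assertion_wrapper(assert_func):
#         def wrapper(self, *args, **kwargs):
#             try:
#                 assert_func(self, *args, **kwargs)
#                 return True
#             except AssertionError as ex:
#                 log.warn("Assertion failed at %s: %s", self._get_caller(), ex)
#                 return False
#         return wrapper
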
class TestDMEnd2End(IonIntegrationTestCase):
    def setUp(self): # Love the non pep-8 convention
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.process_dispatcher   = ProcessDispatcherServiceClient()
        self.pubsub_management    = PubsubManagementServiceClient()
        self.resource_registry    = ResourceRegistryServiceClient()
        self.dataset_management   = DatasetManagementServiceClient()
        self.ingestion_management = IngestionManagementServiceClient()
        self.data_retriever       = DataRetrieverServiceClient()
        self.event                = Event()
        self.exchange_space_name  = 'test_granules'
        self.exchange_point_name  = 'science_data'       
        self.i                    = 0
        self.cci                  = 0

    #--------------------------------------------------------------------------------
    # Helper/Utility methods
    #--------------------------------------------------------------------------------
        
    def create_dataset(self, parameter_dict_id=''):
        '''
        Creates a time-series dataset
        '''
        if not parameter_dict_id:
            parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)

        dataset = Dataset('test_dataset_%i'%self.i)
        dataset_id = self.dataset_management.create_dataset(dataset, parameter_dictionary_id=parameter_dict_id)
        self.addCleanup(self.dataset_management.delete_dataset, dataset_id)
        return dataset_id
    
    def get_datastore(self, dataset_id):
        '''
        Gets an instance of the datastore
            This method is primarily used to defeat a bug where integration tests in multiple containers may sometimes 
            delete a CouchDB datastore and the other containers are unaware of the new state of the datastore.
        '''
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore
    
    def get_ingestion_config(self):
        '''
        Grab the ingestion configuration from the resource registry
        '''
        # The ingestion configuration should have been created by the bootstrap service 
        # which is configured through r2deploy.yml

        ingest_configs, _  = self.resource_registry.find_resources(restype=RT.IngestionConfiguration,id_only=True)
        return ingest_configs[0]

    def launch_producer(self, stream_id=''):
        '''
        Launch the producer
        '''
        pid = self.container.spawn_process('better_data_producer', 'ion.processes.data.example_data_producer', 'BetterDataProducer', {'process':{'stream_id':stream_id}})
        self.addCleanup(self.container.terminate_process, pid)

    def make_simple_dataset(self):
        '''
        Makes a stream, a stream definition and a dataset, the essentials for most of these tests
        '''
        pdict_id             = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        stream_def_id        = self.pubsub_management.create_stream_definition('ctd data %i' % self.i, parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
        stream_id, route     = self.pubsub_management.create_stream('ctd stream %i' % self.i, 'xp1', stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)

        dataset_id = self.create_dataset(pdict_id)

        # self.get_datastore(dataset_id)
        self.i += 1
        return stream_id, route, stream_def_id, dataset_id

    def publish_hifi(self,stream_id,stream_route,offset=0):
        '''
        Publish deterministic data
        '''

        pub = StandaloneStreamPublisher(stream_id, stream_route)

        stream_def = self.pubsub_management.read_stream_definition(stream_id=stream_id)
        stream_def_id = stream_def._id
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10) + (offset * 10)
        rdt['temp'] = np.arange(10) + (offset * 10)
        pub.publish(rdt.to_granule())

    def publish_fake_data(self,stream_id, route):
        '''
        Make four granules
        '''
        for i in xrange(4):
            self.publish_hifi(stream_id,route,i)
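
        # Four granules of ten samples each == 40 data points, matching the default
        # data_size in wait_until_we_have_enough_granules below.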

    def start_ingestion(self, stream_id, dataset_id):
        '''
        Starts ingestion/persistence for a given dataset
        '''
        ingest_config_id = self.get_ingestion_config()
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingest_config_id, dataset_id=dataset_id)
    
    def stop_ingestion(self, stream_id):
        ingest_config_id = self.get_ingestion_config()
        self.ingestion_management.unpersist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingest_config_id)

    def validate_granule_subscription(self, msg, route, stream_id):
        '''
        Validation for granule format
        '''
        if msg == {}:
            return
        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.info('%s', rdt.pretty_print())
        self.assertIsInstance(msg,Granule,'Message is improperly formatted. (%s)' % type(msg))
        self.event.set()

    def wait_until_we_have_enough_granules(self, dataset_id='',data_size=40):
        '''
        Loops until there is a sufficient amount of data in the dataset
        '''
        done = False
        with gevent.Timeout(40):
            while not done:
                extents = self.dataset_management.dataset_extents(dataset_id, 'time')
                granule = self.data_retriever.retrieve_last_data_points(dataset_id, 1)
                rdt     = RecordDictionaryTool.load_from_granule(granule)
                if rdt['time'] and rdt['time'][0] != rdt._pdict.get_context('time').fill_value and extents >= data_size:
                    done = True
                else:
                    gevent.sleep(0.2)
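
        # Note: gevent.Timeout(40) used as a context manager raises gevent.Timeout
        # if the loop has not finished within 40 seconds, failing the calling test
        # instead of hanging indefinitely.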


    #--------------------------------------------------------------------------------
    # Test Methods
    #--------------------------------------------------------------------------------

    def test_dm_end_2_end(self):
        #--------------------------------------------------------------------------------
        # Set up a stream and have a mock instrument (producer) send data
        #--------------------------------------------------------------------------------
        self.event.clear()

        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary',  param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context('binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context('records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary('replay_pdict', parameter_context_ids=context_ids, temporal_context='time')
        
        stream_definition = self.pubsub_management.create_stream_definition('ctd data', parameter_dictionary_id=pdict_id)


        stream_id, route = self.pubsub_management.create_stream('producer', exchange_point=self.exchange_point_name, stream_definition_id=stream_definition)

        #--------------------------------------------------------------------------------
        # Start persisting the data on the stream 
        # - Get the ingestion configuration from the resource registry
        # - Create the dataset
        # - call persist_data_stream to setup the subscription for the ingestion workers
        #   on the stream that you specify which causes the data to be persisted
        #--------------------------------------------------------------------------------

        ingest_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingest_config_id, dataset_id=dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        #--------------------------------------------------------------------------------
        # Now the granules are ingesting and persisted
        #--------------------------------------------------------------------------------

        self.launch_producer(stream_id)
        self.wait_until_we_have_enough_granules(dataset_id,40)
        
        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to retrieve
        #--------------------------------------------------------------------------------
        
        replay_data = self.data_retriever.retrieve(dataset_id)
        self.assertIsInstance(replay_data, Granule)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        self.assertTrue((rdt['time'][:10] == np.arange(10)).all(),'%s' % rdt['time'][:])
        self.assertTrue((rdt['binary'][:10] == np.array(['hi']*10, dtype='object')).all())

        
        #--------------------------------------------------------------------------------
        # Now to try the streamed approach
        #--------------------------------------------------------------------------------
        replay_stream_id, replay_route = self.pubsub_management.create_stream('replay_out', exchange_point=self.exchange_point_name, stream_definition_id=stream_definition)
        self.replay_id, process_id =  self.data_retriever.define_replay(dataset_id=dataset_id, stream_id=replay_stream_id)
        log.info('Process ID: %s', process_id)

        replay_client = ReplayClient(process_id)

    
        #--------------------------------------------------------------------------------
        # Create the listening endpoint for the retriever to talk to
        #--------------------------------------------------------------------------------
        sub_id = self.pubsub_management.create_subscription(self.exchange_space_name,stream_ids=[replay_stream_id])
        self.addCleanup(self.pubsub_management.delete_subscription, sub_id)
        self.pubsub_management.activate_subscription(sub_id)
        self.addCleanup(self.pubsub_management.deactivate_subscription, sub_id)
        subscriber = StandaloneStreamSubscriber(self.exchange_space_name, self.validate_granule_subscription)
        subscriber.start()
        self.addCleanup(subscriber.stop)

        self.data_retriever.start_replay_agent(self.replay_id)

        self.assertTrue(replay_client.await_agent_ready(5), 'The process never launched')
        replay_client.start_replay()
        
        self.assertTrue(self.event.wait(10))

        self.data_retriever.cancel_replay_agent(self.replay_id)


        #--------------------------------------------------------------------------------
        # Test the slicing capabilities
        #--------------------------------------------------------------------------------
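        # The 'tdoa' slice query below selects records by index; slice(0, 5) is
        # expected to return the first five samples (time values 0..4).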

        granule = self.data_retriever.retrieve(dataset_id=dataset_id, query={'tdoa':slice(0,5)})
        rdt = RecordDictionaryTool.load_from_granule(granule)
        b = rdt['time'] == np.arange(5)
        self.assertTrue(b.all() if not isinstance(b,bool) else b)


    def test_coverage_transform(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_parsed()
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        stream_id, route = self.pubsub_management.create_stream('example', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)

        ingestion_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)

        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingestion_config_id, dataset_id=dataset_id)
        self.addCleanup(self.ingestion_management.unpersist_data_stream, stream_id, ingestion_config_id)
        publisher = StandaloneStreamPublisher(stream_id, route)
        
        rdt = ph.get_rdt(stream_def_id)
        ph.fill_parsed_rdt(rdt)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.wait())

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_array_almost_equal(rdt_out['time'], rdt['time'])
        np.testing.assert_array_almost_equal(rdt_out['temp'], rdt['temp'])

        np.testing.assert_allclose(rdt_out['conductivity_L1'], np.array([42.914]))
        np.testing.assert_allclose(rdt_out['temp_L1'], np.array([20.]))
        np.testing.assert_allclose(rdt_out['pressure_L1'], np.array([3.068]))
        np.testing.assert_allclose(rdt_out['density'], np.array([1021.7144739593881], dtype='float32'))
        np.testing.assert_allclose(rdt_out['salinity'], np.array([30.935132729668283], dtype='float32'))
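
        # The *_L1, density and salinity values were never published above; they are
        # expected to be computed on retrieval by parameter functions defined in the
        # parsed parameter dictionary (the "coverage transform" under test).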


    def test_ingestion_pause(self):
        ctd_stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        ingestion_config_id = self.get_ingestion_config()
        self.start_ingestion(ctd_stream_id, dataset_id)
        self.addCleanup(self.stop_ingestion, ctd_stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10)

        publisher = StandaloneStreamPublisher(ctd_stream_id, route)
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)
        publisher.publish(rdt.to_granule())
        self.assertTrue(monitor.wait())
        granule = self.data_retriever.retrieve(dataset_id)


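        # While ingestion is paused, published granules are not persisted;
        # on resume the queued data is ingested (the full 0-19 time axis shows up below).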
        self.ingestion_management.pause_data_stream(ctd_stream_id, ingestion_config_id)

        monitor.event.clear()
        rdt['time'] = np.arange(10,20)
        publisher.publish(rdt.to_granule())
        self.assertFalse(monitor.event.wait(1))

        self.ingestion_management.resume_data_stream(ctd_stream_id, ingestion_config_id)

        self.assertTrue(monitor.wait())

        granule = self.data_retriever.retrieve(dataset_id)
        rdt2 = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_almost_equal(rdt2['time'], np.arange(20))

    def test_last_granule(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        self.publish_hifi(stream_id,route, 0)
        self.publish_hifi(stream_id,route, 1)
        

        self.wait_until_we_have_enough_granules(dataset_id, 20) # two granules of ten values each


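        # poll() is a test utility that repeatedly calls the verifier until it
        # returns truthy or gives up.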
        def verifier():
            replay_granule = self.data_retriever.retrieve_last_data_points(dataset_id, 10)
            rdt = RecordDictionaryTool.load_from_granule(replay_granule)
            comp = rdt['time'] == np.arange(10) + 10
            if not isinstance(comp, bool):
                return comp.all()
            return False
        success = poll(verifier)

        self.assertTrue(success)

        def verify_points():
            replay_granule = self.data_retriever.retrieve_last_data_points(dataset_id, 5)
            rdt = RecordDictionaryTool.load_from_granule(replay_granule)
            comp = rdt['time'] == np.arange(15, 20)
            if not isinstance(comp, bool):
                return comp.all()
            return False
        success = poll(verify_points)

        self.assertTrue(success)

    def test_replay_with_parameters(self):
        #--------------------------------------------------------------------------------
        # Create the configurations and the dataset
        #--------------------------------------------------------------------------------
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary',  param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context('binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context('records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary('replay_pdict', parameter_context_ids=context_ids, temporal_context='time')
        

        stream_def_id = self.pubsub_management.create_stream_definition('replay_stream', parameter_dictionary_id=pdict_id)
        
        stream_id, route  = self.pubsub_management.create_stream('replay_with_params', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        config_id  = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        self.publish_fake_data(stream_id, route)

        self.assertTrue(dataset_monitor.wait())

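        # Query bounds are NTP timestamps: 2208988800 is the offset between the
        # NTP epoch (Jan 1 1900) and the unix epoch (Jan 1 1970).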
        query = {
            'start_time': 0 - 2208988800,
            'end_time':   19 - 2208988800,
            'stride_time' : 2,
            'parameters': ['time','temp']
        }
        retrieved_data = self.data_retriever.retrieve(dataset_id=dataset_id,query=query)

        rdt = RecordDictionaryTool.load_from_granule(retrieved_data)
        np.testing.assert_array_equal(rdt['time'], np.arange(0,20,2))
        self.assertEquals(set(rdt.iterkeys()), set(['time','temp']))

        extents = self.dataset_management.dataset_extents(dataset_id=dataset_id, parameters=['time','temp'])
        self.assertTrue(extents['time']>=20)
        self.assertTrue(extents['temp']>=20)

    def test_repersist_data(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        self.publish_hifi(stream_id,route,0)
        self.publish_hifi(stream_id,route,1)
        self.wait_until_we_have_enough_granules(dataset_id,20)
        config_id = self.get_ingestion_config()
        self.ingestion_management.unpersist_data_stream(stream_id=stream_id,ingestion_configuration_id=config_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id,ingestion_configuration_id=config_id,dataset_id=dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)
        self.publish_hifi(stream_id,route,2)
        self.publish_hifi(stream_id,route,3)
        self.wait_until_we_have_enough_granules(dataset_id,40)
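        # Poll (up to 5 seconds) until the retrieved time axis covers all 40
        # points; repersisting must not drop or duplicate any data.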
        success = False
        with gevent.timeout.Timeout(5):
            while not success:
                replay_granule = self.data_retriever.retrieve(dataset_id)
                rdt = RecordDictionaryTool.load_from_granule(replay_granule)
                comp = rdt['time'] == np.arange(0, 40)
                if not isinstance(comp, bool):
                    success = comp.all()
                gevent.sleep(1)
        self.assertTrue(success)


    @unittest.skip('deprecated')
    def test_correct_time(self):

        # There are 2208988800 seconds between Jan 1 1900 and Jan 1 1970, i.e. 
        #  the conversion factor between unix and NTP time
        unix_now = np.floor(time.time())
        ntp_now  = unix_now + 2208988800 

        unix_ago = unix_now - 20
        ntp_ago  = unix_ago + 2208988800

        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        coverage = DatasetManagementService._get_simplex_coverage(dataset_id, mode='a')
        coverage.insert_timesteps(20)
        coverage.set_parameter_values('time', np.arange(ntp_ago,ntp_now))
        
        temporal_bounds = self.dataset_management.dataset_temporal_bounds(dataset_id)

        self.assertTrue( np.abs(temporal_bounds[0] - unix_ago) < 2)
        self.assertTrue( np.abs(temporal_bounds[1] - unix_now) < 2)


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Host requires file-system access to coverage files, CEI mode does not support.')
    def test_out_of_band_retrieve(self):
        # Set up the environment
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        
        # Fill the dataset
        self.publish_fake_data(stream_id, route)
        self.wait_until_we_have_enough_granules(dataset_id,40)

        # Retrieve the data
        granule = DataRetrieverService.retrieve_oob(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        self.assertTrue((rdt['time'] == np.arange(40)).all())

    def publish_and_wait(self, dataset_id, granule):
        stream_ids, _ = self.resource_registry.find_objects(dataset_id, PRED.hasStream,id_only=True)
        stream_id=stream_ids[0]
        route = self.pubsub_management.read_stream_route(stream_id)
        publisher = StandaloneStreamPublisher(stream_id,route)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        publisher.publish(granule)
        self.assertTrue(dataset_monitor.wait())


    def test_sparse_values(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_sparse()
        stream_def_id = self.pubsub_management.create_stream_definition('sparse', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
        stream_id, route = self.pubsub_management.create_stream('example', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)
        dataset_id = self.create_dataset(pdict_id)
        self.start_ingestion(stream_id,dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        # Publish the initial granule; it carries the sparse values,
        # setting lat to 45 and lon to -71
        ntp_now = time.time() + 2208988800
        rdt = ph.get_rdt(stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['internal_timestamp'] = [ntp_now]
        rdt['temp'] = [300000]
        rdt['preferred_timestamp'] = ['driver_timestamp']
        rdt['port_timestamp'] = [ntp_now]
        rdt['quality_flag'] = ['']
        rdt['lat'] = [45]
        rdt['conductivity'] = [4341400]
        rdt['driver_timestamp'] = [ntp_now]
        rdt['lon'] = [-71]
        rdt['pressure'] = [256.8]

        publisher = StandaloneStreamPublisher(stream_id, route)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.wait())
        dataset_monitor.reset()

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        # Check the values and make sure they're correct
        np.testing.assert_allclose(rdt_out['time'], rdt['time'])
        np.testing.assert_allclose(rdt_out['temp'], rdt['temp'])
        np.testing.assert_allclose(rdt_out['lat'], np.array([45]))
        np.testing.assert_allclose(rdt_out['lon'], np.array([-71]))

        np.testing.assert_allclose(rdt_out['conductivity_L1'], np.array([42.914]))
        np.testing.assert_allclose(rdt_out['temp_L1'], np.array([20.]))
        np.testing.assert_allclose(rdt_out['pressure_L1'], np.array([3.068]))
        np.testing.assert_allclose(rdt_out['density'], np.array([1021.7144739593881], dtype='float32'))
        np.testing.assert_allclose(rdt_out['salinity'], np.array([30.935132729668283], dtype='float32'))


        # We're going to change the lat/lon
        rdt = ph.get_rdt(stream_def_id)
        rdt['time'] = time.time() + 2208988800
        rdt['lat'] = [46]
        rdt['lon'] = [-73]
        
        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.wait())
        dataset_monitor.reset()


        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_allclose(rdt_out['time'], rdt['time'])
        
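        # Publish nine more granules without lat/lon; the sparse parameters
        # should hold their last set values (46 / -73) for these records.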
        for i in xrange(9):
            ntp_now = time.time() + 2208988800
            rdt['time'] = [ntp_now]
            rdt['internal_timestamp'] = [ntp_now]
            rdt['temp'] = [300000]
            rdt['preferred_timestamp'] = ['driver_timestamp']
            rdt['port_timestamp'] = [ntp_now]
            rdt['quality_flag'] = [None]
            rdt['conductivity'] = [4341400]
            rdt['driver_timestamp'] = [ntp_now]
            rdt['pressure'] = [256.8]

            publisher.publish(rdt.to_granule())
            self.assertTrue(dataset_monitor.wait())
            dataset_monitor.reset()

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_allclose(rdt_out['pressure'], np.array([256.8] * 10))
        np.testing.assert_allclose(rdt_out['lat'], np.array([45] + [46] * 9))
        np.testing.assert_allclose(rdt_out['lon'], np.array([-71] + [-73] * 9))


class TestCTDTransformsIntegration(IonIntegrationTestCase):
    pdict_id = None

    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataset_management = self.datasetclient

    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name+'_logger')
        producer_definition.executable = {
            'module':'ion.processes.data.stream_granule_logger',
            'class':'StreamGranuleLogger'
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
        configuration = {
            'process':{
                'stream_id':stream_id,
                }
        }
        pid = self.processdispatchclient.schedule_process(process_definition_id= logger_procdef_id, configuration=configuration)

        return pid

    def _create_instrument_model(self):

        instModel_obj = IonObject(  RT.InstrumentModel,
            name='SBE37IMModel',
            description="SBE37IMModel"  )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        return instModel_id

    def _create_instrument_agent(self, instModel_id):

        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict')
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict')

        instAgent_obj = IonObject(RT.InstrumentAgent,
                                    name='agent007',
                                    description="SBE37IMAgent",
                                    driver_uri=DRV_URI_GOOD,
                                    stream_configurations = [raw_config, parsed_config] )
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        return instAgent_id

    def _create_instrument_device(self, instModel_id):

        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )

        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ", instDevice_id)

        return instDevice_id

    def _create_instrument_agent_instance(self, instAgent_id,instDevice_id):


        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
            description="SBE37IMAgentInstance",
            port_agent_config = port_agent_config)

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj,
            instAgent_id,
            instDevice_id)

        return instAgentInstance_id

    def _create_param_dicts(self):
        tdom, sdom = time_series_domain()

        self.sdom = sdom.dump()
        self.tdom = tdom.dump()

        self.pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)

    def _create_input_data_products(self, ctd_stream_def_id, instDevice_id):

        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product)

        #---------------------------------------------------------------------------
        # Retrieve the id of the OUTPUT stream from the out Data Product
        #---------------------------------------------------------------------------

        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        pid = self.create_logger('ctd_parsed', stream_ids[0] )
        self.loggerpids.append(pid)

        #---------------------------------------------------------------------------
        # Create CTD Raw as the second data product
        #---------------------------------------------------------------------------
        if not self.pdict_id:
            self._create_param_dicts()
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=self.pdict_id)

        dp_obj = IonObject(RT.DataProduct,
            name='the raw data',
            description='raw stream test',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_raw_data_product)

        #---------------------------------------------------------------------------
        # Retrieve the id of the OUTPUT stream from the out Data Product
        #---------------------------------------------------------------------------

        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)
        print 'Data product streams2 = ', stream_ids

        return ctd_parsed_data_product

    def _create_data_process_definitions(self):

        #-------------------------------------------------------------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------------------------------------------------------------

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='ctd_L0_all',
            description='transform ctd package into three separate L0 streams',
            module='ion.processes.data.transforms.ctd.ctd_L0_all',
            class_name='ctd_L0_all')
        self.ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        #-------------------------------------------------------------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='ctd_L1_conductivity',
            description='create the L1 conductivity data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
            class_name='CTDL1ConductivityTransform')
        self.ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        #-------------------------------------------------------------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='ctd_L1_pressure',
            description='create the L1 pressure data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
            class_name='CTDL1PressureTransform')
        self.ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        #-------------------------------------------------------------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='ctd_L1_temperature',
            description='create the L1 temperature data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
            class_name='CTDL1TemperatureTransform')
        self.ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        #-------------------------------------------------------------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='ctd_L2_salinity',
            description='create the L2 salinity data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
            class_name='SalinityTransform')
        self.ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        #-------------------------------------------------------------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='ctd_L2_density',
            description='create the L2 density data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_density',
            class_name='DensityTransform')
        self.ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        return self.ctd_L0_all_dprocdef_id, self.ctd_L1_conductivity_dprocdef_id,\
               self.ctd_L1_pressure_dprocdef_id,self.ctd_L1_temperature_dprocdef_id, \
                self.ctd_L2_salinity_dprocdef_id, self.ctd_L2_density_dprocdef_id

    def _create_stream_definitions(self):
        if not self.pdict_id:
            self._create_param_dicts()

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, self.ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, self.ctd_L0_all_dprocdef_id, binding= 'pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, self.ctd_L0_all_dprocdef_id, binding='temperature' )

        return outgoing_stream_l0_conductivity_id, outgoing_stream_l0_pressure_id, outgoing_stream_l0_temperature_id


    def _create_l0_output_data_products(self, outgoing_stream_l0_conductivity_id, outgoing_stream_l0_pressure_id, outgoing_stream_l0_temperature_id):

        out_data_prods = []


        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
            name='L0_Conductivity',
            description='transform output conductivity',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        self.ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
            outgoing_stream_l0_conductivity_id)
        out_data_prods.append(self.ctd_l0_conductivity_output_dp_id)
        self.dataproductclient.activate_data_product_persistence(data_product_id=self.ctd_l0_conductivity_output_dp_id)

        ctd_l0_pressure_output_dp_obj = IonObject(  RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        self.ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
            outgoing_stream_l0_pressure_id)
        out_data_prods.append(self.ctd_l0_pressure_output_dp_id)
        self.dataproductclient.activate_data_product_persistence(data_product_id=self.ctd_l0_pressure_output_dp_id)

        ctd_l0_temperature_output_dp_obj = IonObject(   RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        self.ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
            outgoing_stream_l0_temperature_id)
        out_data_prods.append(self.ctd_l0_temperature_output_dp_id)
        self.dataproductclient.activate_data_product_persistence(data_product_id=self.ctd_l0_temperature_output_dp_id)

        return out_data_prods

    def _create_l1_out_data_products(self):

        ctd_l1_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        self.ctd_l1_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l1_conductivity_output_dp_obj,
            self.outgoing_stream_l1_conductivity_id)
        self.dataproductclient.activate_data_product_persistence(data_product_id=self.ctd_l1_conductivity_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(self.ctd_l1_conductivity_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_conductivity', stream_ids[0] )
        self.loggerpids.append(pid)

        ctd_l1_pressure_output_dp_obj = IonObject(  RT.DataProduct,
            name='L1_Pressure',
            description='transform output L1 pressure',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        self.ctd_l1_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l1_pressure_output_dp_obj,
            self.outgoing_stream_l1_pressure_id)
        self.dataproductclient.activate_data_product_persistence(data_product_id=self.ctd_l1_pressure_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(self.ctd_l1_pressure_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_pressure', stream_ids[0] )
        self.loggerpids.append(pid)

        ctd_l1_temperature_output_dp_obj = IonObject(   RT.DataProduct,
            name='L1_Temperature',
            description='transform output L1 temperature',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        self.ctd_l1_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l1_temperature_output_dp_obj,
            self.outgoing_stream_l1_temperature_id)
        self.dataproductclient.activate_data_product_persistence(data_product_id=self.ctd_l1_temperature_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(self.ctd_l1_temperature_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_temperature', stream_ids[0] )
        self.loggerpids.append(pid)

    def _create_l2_out_data_products(self):

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        if not self.pdict_id: self._create_param_dicts()
        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(name='L2_salinity', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_salinity_id, self.ctd_L2_salinity_dprocdef_id, binding='salinity' )

        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(name='L2_Density', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_density_id, self.ctd_L2_density_dprocdef_id, binding='density' )

        ctd_l2_salinity_output_dp_obj = IonObject(RT.DataProduct,
            name='L2_Salinity',
            description='transform output L2 salinity',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        self.ctd_l2_salinity_output_dp_id = self.dataproductclient.create_data_product(ctd_l2_salinity_output_dp_obj,
            outgoing_stream_l2_salinity_id)

        self.dataproductclient.activate_data_product_persistence(data_product_id=self.ctd_l2_salinity_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(self.ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l2_salinity', stream_ids[0] )
        self.loggerpids.append(pid)

        ctd_l2_density_output_dp_obj = IonObject(   RT.DataProduct,
            name='L2_Density',
            description='transform output L2 density',
            temporal_domain = self.tdom,
            spatial_domain = self.sdom)

        self.ctd_l2_density_output_dp_id = self.dataproductclient.create_data_product(ctd_l2_density_output_dp_obj,
            outgoing_stream_l2_density_id)

        self.dataproductclient.activate_data_product_persistence(data_product_id=self.ctd_l2_density_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(self.ctd_l2_density_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l2_density', stream_ids[0] )
        self.loggerpids.append(pid)

    @unittest.skip('This test errors on coi-nightly, may be OBE.')
    def test_createTransformsThenActivateInstrument(self):

        self.loggerpids = []

        #-------------------------------------------------------------------------------------
        # Create InstrumentModel
        #-------------------------------------------------------------------------------------

        instModel_id = self._create_instrument_model()

        #-------------------------------------------------------------------------------------
        # Create InstrumentAgent
        #-------------------------------------------------------------------------------------

        instAgent_id = self._create_instrument_agent(instModel_id)

        #-------------------------------------------------------------------------------------
        # Create InstrumentDevice
        #-------------------------------------------------------------------------------------

        instDevice_id = self._create_instrument_device(instModel_id)

        #-------------------------------------------------------------------------------------
        # Create Instrument Agent Instance
        #-------------------------------------------------------------------------------------

        instAgentInstance_id = self._create_instrument_agent_instance(instAgent_id,instDevice_id )

        #-------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #-------------------------------------------------------------------------------------

        self._create_param_dicts()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_CDM', 
                                                parameter_dictionary_id=self.pdict_id)



        #-------------------------------------------------------------------------------------
        # Create two data products
        #-------------------------------------------------------------------------------------

        ctd_parsed_data_product = self._create_input_data_products(ctd_stream_def_id,instDevice_id)

        #-------------------------------------------------------------------------------------
        # Create data process definitions
        #-------------------------------------------------------------------------------------
        self._create_data_process_definitions()

        #-------------------------------------------------------------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------------------------------------------------------------

        outgoing_stream_l0_conductivity_id, \
        outgoing_stream_l0_pressure_id, \
        outgoing_stream_l0_temperature_id = self._create_stream_definitions()

        self.out_prod_ids = self._create_l0_output_data_products(outgoing_stream_l0_conductivity_id,outgoing_stream_l0_pressure_id,outgoing_stream_l0_temperature_id)

        self.outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(name='L1_conductivity', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.outgoing_stream_l1_conductivity_id, self.ctd_L1_conductivity_dprocdef_id, binding='conductivity' )

        self.outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(name='L1_Pressure', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.outgoing_stream_l1_pressure_id, self.ctd_L1_pressure_dprocdef_id, binding='pressure' )
        
        self.outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(name='L1_Temperature', parameter_dictionary_id=self.pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.outgoing_stream_l1_temperature_id, self.ctd_L1_temperature_dprocdef_id, binding= 'temperature' )

        self._create_l1_out_data_products()

        self._create_l2_out_data_products()

        #-------------------------------------------------------------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------------------------------------------------------------
        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition_id = self.ctd_L0_all_dprocdef_id,
            in_data_product_ids = [ctd_parsed_data_product],
            out_data_product_ids = self.out_prod_ids)
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)

        data_process = self.rrclient.read(ctd_l0_all_data_process_id)

        process_ids, _ = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasProcess, id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process,process_ids[0])

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEquals(extended_process.computed.operational_state.status, ComputedValueAvailability.NOTAVAILABLE)
        self.assertEquals(data_process.message_controllable, True)


        #-------------------------------------------------------------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------------------------------------------------------------
        l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(self.ctd_L1_conductivity_dprocdef_id, [self.ctd_l0_conductivity_output_dp_id], [self.ctd_l1_conductivity_output_dp_id])
        self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id)

        data_process = self.rrclient.read(l1_conductivity_data_process_id)
        process_ids, _ = self.rrclient.find_objects(subject=l1_conductivity_data_process_id, predicate=PRED.hasProcess, id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process,process_ids[0])

        #-------------------------------------------------------------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------------------------------------------------------------
        l1_pressure_data_process_id = self.dataprocessclient.create_data_process(self.ctd_L1_pressure_dprocdef_id, [self.ctd_l0_pressure_output_dp_id], [self.ctd_l1_pressure_output_dp_id])
        self.dataprocessclient.activate_data_process(l1_pressure_data_process_id)

        data_process = self.rrclient.read(l1_pressure_data_process_id)
        process_ids, _ = self.rrclient.find_objects(subject=l1_pressure_data_process_id, predicate=PRED.hasProcess, id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

        #-------------------------------------------------------------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------------------------------------------------------------
        l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(self.ctd_L1_temperature_dprocdef_id, [self.ctd_l0_temperature_output_dp_id], [self.ctd_l1_temperature_output_dp_id])
        self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id)

        data_process = self.rrclient.read(l1_temperature_all_data_process_id)
        process_ids, _ = self.rrclient.find_objects(subject=l1_temperature_all_data_process_id, predicate=PRED.hasProcess, id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

        #-------------------------------------------------------------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------------------------------------------------------------
        l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(self.ctd_L2_salinity_dprocdef_id, [ctd_parsed_data_product], [self.ctd_l2_salinity_output_dp_id])
        self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id)

        data_process = self.rrclient.read(l2_salinity_all_data_process_id)
        process_ids, _ = self.rrclient.find_objects(subject=l2_salinity_all_data_process_id, predicate=PRED.hasProcess, id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

        #-------------------------------------------------------------------------------------
        # L2 Density: Create the data process
        #-------------------------------------------------------------------------------------
        l2_density_all_data_process_id = self.dataprocessclient.create_data_process(self.ctd_L2_density_dprocdef_id, [ctd_parsed_data_product], [self.ctd_l2_density_output_dp_id])
        self.dataprocessclient.activate_data_process(l2_density_all_data_process_id)

        data_process = self.rrclient.read(l2_density_all_data_process_id)
        process_ids, _ = self.rrclient.find_objects(subject=l2_density_all_data_process_id, predicate=PRED.hasProcess, id_only=True)
        self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

        #-------------------------------------------------------------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------------------------------------------------------------
        self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        
        # Wait for instrument agent to spawn
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     instDevice_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(15), "The instrument agent instance did not spawn in 15 seconds")

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(instDevice_id, to_name=gate.process_id, process=FakeProcess())

        #-------------------------------------------------------------------------------------
        # Streaming
        #-------------------------------------------------------------------------------------


        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        reply = self._ia_client.execute_agent(cmd)
        self.assertTrue(reply.status == 0)

        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        self.assertTrue(reply.status == 0)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("(L4-CI-SA-RQ-334): current state after sending go_active command %s", str(state))
        self.assertEquals(state, 'DRIVER_STATE_COMMAND')

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        self.assertTrue(reply.status == 0)

        #todo ResourceAgentClient no longer has method set_param
#        # Make sure the sampling rate and transmission are sane.
#        params = {
#            SBE37Parameter.NAVG : 1,
#            SBE37Parameter.INTERVAL : 5,
#            SBE37Parameter.TXREALTIME : True
#        }
#        self._ia_client.set_param(params)


        #todo There is no ResourceAgentEvent attribute for go_streaming... so what should be the command for it?
        cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
        retval = self._ia_client.execute_resource(cmd)

        # This gevent sleep is there to test the autosample time, which will show something different from default
        # only if the instrument runs for over a minute
        gevent.sleep(90)

        extended_instrument = self.imsclient.get_instrument_device_extension(instrument_device_id=instDevice_id)

        self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue)

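        # The uptime computed value is a display string; this assumes the fifth
        # whitespace-delimited token is the elapsed autosample time in seconds.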
        autosample_string = extended_instrument.computed.uptime.value
        autosampling_time = int(autosample_string.split()[4])

        self.assertTrue(autosampling_time > 0)

        cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE)
        retval = self._ia_client.execute_resource(cmd)

        #todo There is no ResourceAgentEvent attribute for go_observatory... so what should be the command for it?
#        log.debug("test_activateInstrumentStream: calling go_observatory")
#        cmd = AgentCommand(command='go_observatory')
#        reply = self._ia_client.execute_agent(cmd)
#        cmd = AgentCommand(command='get_current_state')
#        retval = self._ia_client.execute_agent(cmd)
#        state = retval.result
#        log.debug("test_activateInstrumentStream: return from go_observatory state  %s", str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client.execute_agent(cmd)
        self.assertTrue(reply.status == 0)


        #-------------------------------------------------------------------------------------------------
        # Cleanup processes
        #-------------------------------------------------------------------------------------------------
        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)


        #--------------------------------------------------------------------------------
        # Cleanup data products
        #--------------------------------------------------------------------------------
        dp_ids, _ = self.rrclient.find_resources(restype=RT.DataProduct, id_only=True)

        for dp_id in dp_ids:
            self.dataproductclient.delete_data_product(dp_id)


class TestDataProductProvenance(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.process_dispatcher   = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    def test_get_data_product_provenance_report(self):
        #Create a test device
        device_obj = Device(name='Device1',
                                        description='test device')
        device_id, _ = self.rrclient.create(device_obj)
        self.addCleanup(self.rrclient.delete, device_id)

        #Create a test DataProduct
        data_product1_obj = DataProduct(name='DataProduct1',
                                        description='test data product 1')
        data_product1_id, _ = self.rrclient.create(data_product1_obj)
        self.addCleanup(self.rrclient.delete, data_product1_id)

        #Create a test DataProcess
        data_process_obj = DataProcess(name='DataProcess',
                                       description='test data process')
        data_process_id, _ = self.rrclient.create(data_process_obj)
        self.addCleanup(self.rrclient.delete, data_process_id)

        #Create a second test DataProduct
        data_product2_obj = DataProduct(name='DataProduct2',
                                        description='test data product 2')
        data_product2_id, _ = self.rrclient.create(data_product2_obj)
        self.addCleanup(self.rrclient.delete, data_product2_id)

        #Create a test DataProducer
        data_producer_obj = DataProducer(name='DataProducer',
                                         description='test data producer')
        data_producer_id, rev = self.rrclient.create(data_producer_obj)

        #Link the DataProcess to the second DataProduct manually
        assoc_id, _ = self.rrclient.create_association(subject=data_process_id, predicate=PRED.hasInputProduct, object=data_product2_id)
        self.addCleanup(self.rrclient.delete_association, assoc_id)

        # Register the instrument and process. This links the device and the data process
        # with their own producers
        self.damsclient.register_instrument(device_id)
        self.addCleanup(self.damsclient.unregister_instrument, device_id)
        self.damsclient.register_process(data_process_id)
        self.addCleanup(self.damsclient.unregister_process, data_process_id)

        #Manually link the first DataProduct with the test DataProducer
        assoc_id, _ = self.rrclient.create_association(subject=data_product1_id, predicate=PRED.hasDataProducer, object=data_producer_id)

        #Get the DataProducer linked to the DataProcess (created in register_process above)
        #Associate it with DataProduct1's DataProducer
        data_process_producer_ids, _ = self.rrclient.find_objects(subject=data_process_id, predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True)
        assoc_id, _ = self.rrclient.create_association(subject=data_process_producer_ids[0], predicate=PRED.hasParent, object=data_producer_id)
        self.addCleanup(self.rrclient.delete_association, assoc_id)

        #Get the DataProducer linked to the Device (created in register_instrument above)
        #Associate that with the DataProcess's DataProducer
        device_producer_ids, _ = self.rrclient.find_objects(subject=device_id, predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True)
        assoc_id, _ = self.rrclient.create_association(subject=data_producer_id, predicate=PRED.hasParent, object=device_producer_ids[0])

        #Create the links between the Device, DataProducts, DataProcess, and all DataProducers
        self.damsclient.assign_data_product(input_resource_id=device_id, data_product_id=data_product1_id)
        self.addCleanup(self.damsclient.unassign_data_product, device_id, data_product1_id)
        self.damsclient.assign_data_product(input_resource_id=data_process_id, data_product_id=data_product2_id)
        self.addCleanup(self.damsclient.unassign_data_product, data_process_id, data_product2_id)

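        # At this point the producer graph chains DataProduct2 -> DataProcess ->
        # DataProduct1 -> Device via the hasParent associations made above.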
        #Traverse through the relationships to get the links between objects
        res = self.dpmsclient.get_data_product_provenance_report(data_product2_id)

        #Make sure there are four keys
        self.assertEqual(len(res.keys()), 4)

        parent_count = 0
        config_count = 0
        for v in res.itervalues():
            if 'parent' in v:
                parent_count += 1
            if 'config' in v:
                config_count += 1

        #Make sure there are three parents and four configs
        self.assertEqual(parent_count, 3)
        self.assertEqual(config_count, 4)


    @unittest.skip('This test is obsolete with new framework')
    def test_get_provenance(self):

        #create a deployment with metadata and an initial site and device
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, "")
        log.debug( 'test_get_provenance: new instrument_site_id id = %s ', str(instrument_site_id))


        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel" )

        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" %ex)
        log.debug( 'test_get_provenance: new InstrumentModel id = %s ', str(instModel_id))

        self.omsclient.assign_instrument_model_to_instrument_site(instModel_id, instrument_site_id)


        # Create InstrumentAgent
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict' )
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                name='agent007',
                                description="SBE37IMAgent",
                                driver_uri=DRV_URI_GOOD,
                                stream_configurations = [parsed_config] )
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" %ex)
        log.debug( 'test_get_provenance:new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_get_provenance: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" %ex)

        log.debug("test_get_provenance: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ", instDevice_id)


        #-------------------------------
        # Create CTD Parsed  data product
        #-------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubclient.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)

        log.debug( 'test_get_provenance:Creating new CDM data product with a stream definition')


        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dpmsclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', ctd_parsed_data_product)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)
        self.dpmsclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product)

        #-------------------------------
        # create a data product for the site to satisfy the OMS precondition check (TODO: remove this check)
        #-------------------------------
        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        log_data_product_id = self.dpmsclient.create_data_product(dp_obj, parsed_stream_def_id)


        #-------------------------------
        # Deploy instrument device to instrument site
        #-------------------------------
        deployment_obj = IonObject(RT.Deployment,
                                        name='TestDeployment',
                                        description='some new deployment')
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id)
        self.imsclient.deploy_instrument_device(instDevice_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        self.omsclient.activate_deployment(deployment_id)
        inst_device_objs, _ = self.rrclient.find_objects(subject=instrument_site_id, predicate=PRED.hasDevice, object_type=RT.InstrumetDevice, id_only=False)
        log.debug("test_create_deployment: deployed device: %s ", str(inst_device_objs[0]) )

        #-------------------------------
        # Create the agent instance
        #-------------------------------
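        # Port agent settings: device_addr/device_port point at the SBE37
        # (simulator) endpoint from CFG, while command_port/data_port are the
        # local ports the UNIX port agent process itself listens on.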

        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }


        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
            description="SBE37IMAgentInstance",
            port_agent_config = port_agent_config)

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition ctd_L0_all")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new ctd_L0_all data process definition: %s" %ex)


        #-------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1ConductivityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_conductivity',
                            description='create the L1 conductivity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
                            class_name='CTDL1ConductivityTransform')
        try:
            ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1ConductivityTransform data process definition: %s" %ex)

        #-------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1PressureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_pressure',
                            description='create the L1 pressure data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
                            class_name='CTDL1PressureTransform')
        try:
            ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1PressureTransform data process definition: %s" %ex)


        #-------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1TemperatureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_temperature',
                            description='create the L1 temperature data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
                            class_name='CTDL1TemperatureTransform')
        try:
            ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1TemperatureTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition SalinityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_salinity',
                            description='create the L2 salinity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
                            class_name='SalinityTransform')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new SalinityTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition DensityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_density',
                            description='create the L2 density data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_density',
                            class_name='DensityTransform')
        try:
            ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new DensityTransform data process definition: %s" %ex)



        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )
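        # Note: each binding name ('conductivity', 'pressure', 'temperature')
        # is expected to match an output stream the ctd_L0_all transform
        # publishes on.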


        log.debug("TestDataProductProvenance: create output data product L0 conductivity")

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)


        log.debug("TestDataProductProvenance: create output data product L0 pressure")

        ctd_l0_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L0_Pressure',
                                                    description='transform output pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                            outgoing_stream_l0_pressure_id)
        log.debug("TestDataProductProvenance: create output data product L0 temperature")

        ctd_l0_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L0_Temperature',
                                                        description='transform output temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                outgoing_stream_l0_temperature_id)

        #-------------------------------
        # L1 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(name='L1_conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id, binding='conductivity' )

        outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(name='L1_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id, binding='pressure' )

        outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(name='L1_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id, binding='temperature' )

        log.debug("TestDataProductProvenance: create output data product L1 conductivity")

        ctd_l1_conductivity_output_dp_obj = IonObject(RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l1_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_conductivity_output_dp_obj,
                                                                                outgoing_stream_l1_conductivity_id)


        log.debug("TestDataProductProvenance: create output data product L1 pressure")

        ctd_l1_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L1_Pressure',
                                                    description='transform output L1 pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l1_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_pressure_output_dp_obj,
                                                                            outgoing_stream_l1_pressure_id)


        log.debug("TestDataProductProvenance: create output data product L1 temperature")

        ctd_l1_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L1_Temperature',
                                                        description='transform output L1 temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l1_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_temperature_output_dp_obj,
                                                                                outgoing_stream_l1_temperature_id)

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(name='L2_salinity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id, binding='salinity' )

        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(name='L2_Density', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id, binding='density' )

        log.debug("TestDataProductProvenance: create output data product L2 Salinity")

        ctd_l2_salinity_output_dp_obj = IonObject( RT.DataProduct,
                                                    name='L2_Salinity',
                                                    description='transform output L2 salinity',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)


        ctd_l2_salinity_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_salinity_output_dp_obj,
                                                                            outgoing_stream_l2_salinity_id)


        log.debug("TestDataProductProvenance: create output data product L2 Density")


        contactInfo = ContactInformation()
        contactInfo.individual_names_given = "Bill"
        contactInfo.individual_name_family = "Smith"
        contactInfo.street_address = "111 First St"
        contactInfo.city = "San Diego"
        contactInfo.email = "*****@*****.**"
        contactInfo.phones = ["858-555-6666"]
        contactInfo.country = "USA"
        contactInfo.postal_code = "92123"

        ctd_l2_density_output_dp_obj = IonObject(   RT.DataProduct,
                                                    name='L2_Density',
                                                    description='transform output density',
                                                    contacts = [contactInfo],
                                                    iso_topic_category = "my_iso_topic_category_here",
                                                    quality_control_level = "1",
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj,
                                                                            outgoing_stream_l2_density_id)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L0 all data_process start")
        try:
            input_data_products = [ctd_parsed_data_product]
            output_data_products = [ctd_l0_conductivity_output_dp_id, ctd_l0_pressure_output_dp_id, ctd_l0_temperature_output_dp_id]

            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L0_all_dprocdef_id,
                in_data_product_ids = input_data_products,
                out_data_product_ids = output_data_products
            )
            #activate only this data process just for coverage
            self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        contents = "this is the lookup table  contents, replace with a file..."
        att = IonObject(RT.Attachment, name='deviceLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII)
        deviceAttachment = self.rrclient.create_attachment(ctd_l0_all_data_process_id, att)
        log.info( 'test_createTransformsThenActivateInstrument: InstrumentDevice attachment id = %s', deviceAttachment)
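        # Attachments associate arbitrary (here base64-encoded ASCII) content
        # with a resource; this one is tagged as input to the data process.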

        log.debug("TestDataProductProvenance: create L0 all data_process return")


        #-------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1 Conductivity data_process start")
        try:
            l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L1_conductivity_dprocdef_id,
                in_data_product_ids = [ctd_l0_conductivity_output_dp_id],
                out_data_product_ids = [ctd_l1_conductivity_output_dp_id])
            
            self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)




        #-------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_pressure_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L1_pressure_dprocdef_id,
                in_data_product_ids = [ctd_l0_pressure_output_dp_id],
                out_data_product_ids = [ctd_l1_pressure_output_dp_id])

            self.dataprocessclient.activate_data_process(l1_pressure_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L1_temperature_dprocdef_id,
                in_data_product_ids = [ctd_l0_temperature_output_dp_id],
                out_data_product_ids = [ctd_l1_temperature_output_dp_id])

            self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        #-------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L2_salinity data_process start")
        try:
            l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L2_salinity_dprocdef_id,
                in_data_product_ids = [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id],
                out_data_product_ids = [ctd_l2_salinity_output_dp_id])

            self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # L2 Density: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L2_Density data_process start")
        try:
            in_dp_ids = [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id]
            out_dp_ids = [ctd_l2_density_output_dp_id]

            l2_density_all_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id = ctd_L2_density_dprocdef_id,
                in_data_product_ids = in_dp_ids,
                out_data_product_ids = out_dp_ids)

            self.dataprocessclient.activate_data_process(l2_density_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)



        #-------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        print 'TestDataProductProvenance: Instrument agent instance obj: = ', inst_agent_instance_obj

        # Start a resource agent client to talk with the instrument agent.
#        self._ia_client = ResourceAgentClient('iaclient', name=ResourceAgentClient._get_agent_process_id(instDevice_id), process=FakeProcess())
#        print 'activate_instrument: got ia client %s', self._ia_client
#        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))


        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        self.dataprocessclient.deactivate_data_process(l2_density_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_temperature_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_pressure_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_conductivity_data_process_id)
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)

        #-------------------------------
        # Retrieve the provenance info for the ctd density data product
        #-------------------------------
        provenance_dict = self.dpmsclient.get_data_product_provenance(ctd_l2_density_output_dp_id)
        log.debug("TestDataProductProvenance: provenance_dict  %s", str(provenance_dict))

        #validate that products are represented
        self.assertTrue (provenance_dict[str(ctd_l1_conductivity_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l0_conductivity_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l2_density_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l1_temperature_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l0_temperature_output_dp_id)])

        density_dict = (provenance_dict[str(ctd_l2_density_output_dp_id)])
        self.assertEquals(density_dict['producer'], [l2_density_all_data_process_id])
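        # A minimal sketch of walking the provenance graph, assuming each
        # entry maps a product id to a dict with a 'producer' key (verified
        # above) and an 'inputs' list (an assumption for illustration only):
        #
        # def walk_provenance(prov, dp_id, depth=0):
        #     entry = prov.get(str(dp_id), {})
        #     log.debug('%s%s <- producer %s', '  ' * depth, dp_id, entry.get('producer'))
        #     for parent_id in entry.get('inputs', []):
        #         walk_provenance(prov, parent_id, depth + 1)
        #
        # walk_provenance(provenance_dict, ctd_l2_density_output_dp_id)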


        #-------------------------------
        # Retrieve the extended resource for this data product
        #-------------------------------
        extended_product = self.dpmsclient.get_data_product_extension(ctd_l2_density_output_dp_id)
        self.assertEqual(1, len(extended_product.data_processes) )
        self.assertEqual(3, len(extended_product.process_input_data_products) )
#        log.debug("TestDataProductProvenance: DataProduct provenance_product_list  %s", str(extended_product.provenance_product_list))
#        log.debug("TestDataProductProvenance: DataProduct data_processes  %s", str(extended_product.data_processes))
#        log.debug("TestDataProductProvenance: DataProduct process_input_data_products  %s", str(extended_product.process_input_data_products))
#        log.debug("TestDataProductProvenance: provenance  %s", str(extended_product.computed.provenance.value))

        #-------------------------------
        # Retrieve the extended resource for this data process
        #-------------------------------
        extended_process_def = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)

#        log.debug("TestDataProductProvenance: DataProcess extended_process_def  %s", str(extended_process_def))
#        log.debug("TestDataProductProvenance: DataProcess data_processes  %s", str(extended_process_def.data_processes))
#        log.debug("TestDataProductProvenance: DataProcess data_products  %s", str(extended_process_def.data_products))
        self.assertEqual(1, len(extended_process_def.data_processes) )
        self.assertEqual(3, len(extended_process_def.output_stream_definitions) )
        self.assertEqual(3, len(extended_process_def.data_products) ) # three output products from the single data process

        #-------------------------------
        # Request the xml report
        #-------------------------------
        results = self.dpmsclient.get_data_product_provenance_report(ctd_l2_density_output_dp_id)
        print results

        #-------------------------------
        # Cleanup
        #-------------------------------

        self.dpmsclient.delete_data_product(ctd_parsed_data_product)
        self.dpmsclient.delete_data_product(log_data_product_id)
        self.dpmsclient.delete_data_product(ctd_l0_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_salinity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_density_output_dp_id)


class TestIMSDeployAsPrimaryDevice(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(
                    RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)

        self.addCleanup(killAllDataProcesses)
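        # find_resources with id_only=True returns (ids, assocs); each process
        # is deactivated before deletion so nothing is left consuming streams.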

    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name + '_logger')
        producer_definition.executable = {
            'module': 'ion.processes.data.stream_granule_logger',
            'class': 'StreamGranuleLogger'
        }
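        # StreamGranuleLogger is assumed to subscribe to the given stream_id
        # and log each granule it receives, making the stream easy to watch.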

        logger_procdef_id = self.processdispatchclient.create_process_definition(
            process_definition=producer_definition)
        configuration = {
            'process': {
                'stream_id': stream_id,
            }
        }
        pid = self.processdispatchclient.schedule_process(
            process_definition_id=logger_procdef_id,
            configuration=configuration)

        return pid

    def cleanupprocs(self):
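        # Best-effort, Unix-only cleanup: parse `ps` output for leftover port
        # agent / driver processes from earlier runs and SIGKILL them.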
        stm = os.popen('ps -e | grep ion.agents.port.logger_process')
        procs = stm.read()
        if len(procs) > 0:
            procs = procs.split()
            if procs[0].isdigit():
                pid = int(procs[0])
                os.kill(pid, signal.SIGKILL)
        stm = os.popen('ps -e | grep ion.agents.instrument.zmq_driver_process')
        procs = stm.read()
        if len(procs) > 0:
            procs = procs.split()
            if procs[0].isdigit():
                pid = int(procs[0])
                os.kill(pid, signal.SIGKILL)
#       stm = os.popen('rm /tmp/*.pid.txt')

    @unittest.skip(
        "Deprecated by IngestionManagement refactor, timeout on start inst agent?"
    )
    def test_deploy_activate_full(self):

        # ensure no processes or pids are left around by agents or Sims
        #self.cleanupprocs()

        self.loggerpids = []

        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        try:
            instModel_id = self.imsclient.create_instrument_model(
                instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" % ex)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri=DRV_URI_GOOD)

        try:
            instAgent_id = self.imsclient.create_instrument_agent(
                instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" % ex)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        #-------------------------------
        # Create Instrument Site
        #-------------------------------
        instrumentSite_obj = IonObject(RT.InstrumentSite,
                                       name='instrumentSite1',
                                       description="SBE37IMInstrumentSite")
        try:
            instrumentSite_id = self.omsclient.create_instrument_site(
                instrument_site=instrumentSite_obj, parent_id='')
        except BadRequest as ex:
            self.fail("failed to create new InstrumentSite: %s" % ex)
        print 'test_deployAsPrimaryDevice: new instrumentSite id = ', instrumentSite_id

        self.omsclient.assign_instrument_model_to_instrument_site(
            instModel_id, instrumentSite_id)

        #-------------------------------
        # Logical Transform: Output Data Products
        #-------------------------------

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubclient.create_stream_definition(
            name='parsed', parameter_dictionary_id=parsed_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.pubsubclient.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create Old InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(
            RT.InstrumentDevice,
            name='SBE37IMDeviceYear1',
            description="SBE37IMDevice for the FIRST year of deployment",
            serial_number="12345")
        try:
            oldInstDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instModel_id, oldInstDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" % ex)

        print 'test_deployAsPrimaryDevice: new Year 1 InstrumentDevice id = ', oldInstDevice_id

        self.rrclient.execute_lifecycle_transition(oldInstDevice_id,
                                                   LCE.DEPLOY)
        self.rrclient.execute_lifecycle_transition(oldInstDevice_id,
                                                   LCE.ENABLE)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='SiteDataProduct',
                           description='SiteDataProduct',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        instrument_site_output_dp_id = self.dataproductclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)

        self.damsclient.assign_data_product(
            input_resource_id=oldInstDevice_id,
            data_product_id=instrument_site_output_dp_id)

        #self.dataproductclient.activate_data_product_persistence(data_product_id=instrument_site_output_dp_id)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(
            instrument_site_output_dp_id, PRED.hasStream, None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(
            instrument_site_output_dp_id, PRED.hasDataset, RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]

        pid = self.create_logger('ctd_parsed', stream_ids[0])
        self.loggerpids.append(pid)

        #-------------------------------
        # Create Old Deployment
        #-------------------------------
        deployment_obj = IonObject(RT.Deployment, name='first deployment')

        oldDeployment_id = self.omsclient.create_deployment(deployment_obj)

        # deploy this device to the logical slot
        self.imsclient.deploy_instrument_device(oldInstDevice_id,
                                                oldDeployment_id)
        self.omsclient.deploy_instrument_site(instrumentSite_id,
                                              oldDeployment_id)

        #-------------------------------
        # Create InstrumentAgentInstance for OldInstrumentDevice to hold configuration information
        # cmd_port=5556, evt_port=5557, comms_method="ethernet", comms_device_address=CFG.device.sbe37.host, comms_device_port=CFG.device.sbe37.port,
        #-------------------------------

        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        raw_config = StreamConfiguration(
            stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict')
        parsed_config = StreamConfiguration(
            stream_name='parsed',
            parameter_dictionary_name='ctd_parsed_param_dict')

        instAgentInstance_obj = IonObject(
            RT.InstrumentAgentInstance,
            name='SBE37IMAgentInstanceYear1',
            description="SBE37IMAgentInstanceYear1",
            port_agent_config=port_agent_config,
            stream_configurations=[raw_config, parsed_config])

        oldInstAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, oldInstDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        #-------------------------------
        # Create CTD Parsed as the Year 1 data product and attach to instrument
        #-------------------------------

        print 'Creating new CDM data product with a stream definition'

        dp_obj = IonObject(RT.DataProduct,
                           name='ctd_parsed_year1',
                           description='ctd stream test year 1',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        ctd_parsed_data_product_year1 = self.dataproductclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)

        print 'new ctd_parsed_data_product_id = ', ctd_parsed_data_product_year1

        self.damsclient.assign_data_product(
            input_resource_id=oldInstDevice_id,
            data_product_id=ctd_parsed_data_product_year1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(
            ctd_parsed_data_product_year1, PRED.hasStream, None, True)
        print 'test_deployAsPrimaryDevice: Data product streams1 = ', stream_ids

        #-------------------------------
        # Create New InstrumentDevice
        #-------------------------------
        instDevice_obj_2 = IonObject(
            RT.InstrumentDevice,
            name='SBE37IMDeviceYear2',
            description="SBE37IMDevice for the SECOND year of deployment",
            serial_number="67890")
        try:
            newInstDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj_2)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instModel_id, newInstDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" % ex)

        print 'test_deployAsPrimaryDevice: new  Year 2 InstrumentDevice id = ', newInstDevice_id

        #set the LCSTATE
        self.rrclient.execute_lifecycle_transition(newInstDevice_id,
                                                   LCE.DEPLOY)
        self.rrclient.execute_lifecycle_transition(newInstDevice_id,
                                                   LCE.ENABLE)

        instDevice_obj_2 = self.rrclient.read(newInstDevice_id)
        log.debug(
            "test_deployAsPrimaryDevice: Create New InstrumentDevice LCSTATE: %s ",
            str(instDevice_obj_2.lcstate))

        #-------------------------------
        # Create Old Deployment
        #-------------------------------
        deployment_obj = IonObject(RT.Deployment, name='second deployment')

        newDeployment_id = self.omsclient.create_deployment(deployment_obj)

        # deploy this device to the logical slot
        self.imsclient.deploy_instrument_device(newInstDevice_id,
                                                newDeployment_id)
        self.omsclient.deploy_instrument_site(instrumentSite_id,
                                              newDeployment_id)

        #-------------------------------
        # Create InstrumentAgentInstance for NewInstrumentDevice to hold configuration information
        #-------------------------------

        port_agent_config = {
            'device_addr': 'sbe37-simulator.oceanobservatories.org',
            'device_port': 4004,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': 4005,
            'data_port': 4006,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(
            RT.InstrumentAgentInstance,
            name='SBE37IMAgentInstanceYear2',
            description="SBE37IMAgentInstanceYear2",
            port_agent_config=port_agent_config)

        newInstAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, newInstDevice_id)

        #-------------------------------
        # Create CTD Parsed as the Year 2 data product
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='ctd_parsed_year2',
                           description='ctd stream test year 2',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        ctd_parsed_data_product_year2 = self.dataproductclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)

        print 'new ctd_parsed_data_product_id = ', ctd_parsed_data_product_year2

        self.damsclient.assign_data_product(
            input_resource_id=newInstDevice_id,
            data_product_id=ctd_parsed_data_product_year2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(
            ctd_parsed_data_product_year2, PRED.hasStream, None, True)
        print 'test_deployAsPrimaryDevice: Data product streams2 = ', stream_ids

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug(
            "test_deployAsPrimaryDevice: create data process definition ctd_L0_all"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L0_all',
            description='transform ctd package into three separate L0 streams',
            module='ion.processes.data.transforms.ctd.ctd_L0_all',
            class_name='ctd_L0_all')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new ctd_L0_all data process definition: %s" %
                ex)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(
            name='L0_Conductivity', parameter_dictionary_id=parsed_pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_conductivity_id,
            ctd_L0_all_dprocdef_id,
            binding='conductivity')

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(
            name='L0_Pressure', parameter_dictionary_id=parsed_pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_pressure_id,
            ctd_L0_all_dprocdef_id,
            binding='pressure')

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(
            name='L0_Temperature', parameter_dictionary_id=parsed_pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_temperature_id,
            ctd_L0_all_dprocdef_id,
            binding='temperature')

        self.out_prod_dict = {}
        log.debug(
            "test_deployAsPrimaryDevice: create output data product L0 conductivity"
        )

        ctd_l0_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Conductivity',
            description='transform output conductivity',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            data_product=ctd_l0_conductivity_output_dp_obj,
            stream_definition_id=parsed_stream_def_id)
        self.out_prod_dict['conductivity'] = ctd_l0_conductivity_output_dp_id
        #self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_conductivity_output_dp_id)

        log.debug(
            "test_deployAsPrimaryDevice: create output data product L0 pressure"
        )

        ctd_l0_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(
            data_product=ctd_l0_pressure_output_dp_obj,
            stream_definition_id=parsed_stream_def_id)

        self.out_prod_dict['pressure'] = ctd_l0_pressure_output_dp_id
        #self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_pressure_output_dp_id)

        log.debug(
            "test_deployAsPrimaryDevice: create output data product L0 temperature"
        )

        ctd_l0_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain=tdom,
            spatial_domain=sdom)

        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(
            data_product=ctd_l0_temperature_output_dp_obj,
            stream_definition_id=parsed_stream_def_id)

        self.out_prod_dict['temperature'] = ctd_l0_temperature_output_dp_id
        #self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_temperature_output_dp_id)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process, listening to  Sim1   (later: logical instrument output product)
        #-------------------------------
        log.debug(
            "test_deployAsPrimaryDevice: create L0 all data_process start")
        try:
            out_data_products = self.out_prod_dict.values()
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L0_all_dprocdef_id, [ctd_parsed_data_product_year1],
                out_data_products)
            self.dataprocessclient.activate_data_process(
                ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)
        log.debug(
            "test_deployAsPrimaryDevice: create L0 all data_process return")

        #--------------------------------
        # Activate the deployment
        #--------------------------------
        self.omsclient.activate_deployment(oldDeployment_id)

        #-------------------------------
        # Launch InstrumentAgentInstance Sim1, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(
            instrument_agent_instance_id=oldInstAgentInstance_id)
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=oldInstAgentInstance_id)

        #wait for start
        instance_obj = self.imsclient.read_instrument_agent_instance(
            oldInstAgentInstance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     oldInstDevice_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(30),
            "The instrument agent instance (%s) did not spawn in 30 seconds" %
            gate.process_id)
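        # The gate polls the process dispatcher until the agent process for
        # this device reaches RUNNING; gate.process_id is then used as the
        # resource agent client's target name below.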

        inst_agent1_instance_obj = self.imsclient.read_instrument_agent_instance(
            oldInstAgentInstance_id)
        print 'test_deployAsPrimaryDevice: Instrument agent instance obj: = ', inst_agent1_instance_obj

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client_sim1 = ResourceAgentClient('iaclient Sim1',
                                                   name=gate.process_id,
                                                   process=FakeProcess())
        print 'activate_instrument: got _ia_client_sim1 ', self._ia_client_sim1
        log.debug(" test_deployAsPrimaryDevice:: got _ia_client_sim1 %s",
                  str(self._ia_client_sim1))

        #-------------------------------
        # Launch InstrumentAgentInstance Sim2, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(
            instrument_agent_instance_id=newInstAgentInstance_id)
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=newInstAgentInstance_id)

        #wait for start
        instance_obj = self.imsclient.read_instrument_agent_instance(
            newInstAgentInstance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     newInstDevice_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(30),
            "The instrument agent instance (%s) did not spawn in 30 seconds" %
            gate.process_id)

        inst_agent2_instance_obj = self.imsclient.read_instrument_agent_instance(
            newInstAgentInstance_id)
        print 'test_deployAsPrimaryDevice: Instrument agent instance obj: = ', inst_agent2_instance_obj

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client_sim2 = ResourceAgentClient('iaclient Sim2',
                                                   name=gate.process_id,
                                                   process=FakeProcess())
        print 'activate_instrument: got _ia_client_sim2 ', self._ia_client_sim2
        log.debug(" test_deployAsPrimaryDevice:: got _ia_client_sim2 %s",
                  str(self._ia_client_sim2))

        #-------------------------------
        # Streaming Sim1 (old instrument)
        #-------------------------------

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._ia_client_sim1.execute_agent(cmd)
        log.debug("test_deployAsPrimaryDevice: initialize %s", str(retval))

        log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client_sim1.execute_agent(cmd)
        log.debug("test_deployAsPrimaryDevice: return value from go_active %s",
                  str(reply))
        self.assertTrue(reply)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client_sim1.execute_agent(cmd)
        state = retval.result
        log.debug(
            "(L4-CI-SA-RQ-334): current state after sending go_active command %s",
            str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client_sim1.execute_agent(cmd)
        log.debug("test_deployAsPrimaryDevice: run %s", str(reply))

        gevent.sleep(2)

        cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
        retval = self._ia_client_sim1.execute_resource(cmd)
        log.debug(
            "test_activateInstrumentSample: return from START_AUTOSAMPLE: %s",
            str(retval))

        #-------------------------------
        # Streaming Sim 2 (new instrument)
        #-------------------------------

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._ia_client_sim2.execute_agent(cmd)
        log.debug("test_deployAsPrimaryDevice: initialize_sim2 %s",
                  str(retval))

        log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client_sim2.execute_agent(cmd)
        log.debug(
            "test_deployAsPrimaryDevice: return value from go_active_sim2 %s",
            str(reply))

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client_sim2.execute_agent(cmd)
        state = retval.result
        log.debug(
            "(L4-CI-SA-RQ-334): current state after sending go_active_sim2 command %s",
            str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client_sim2.execute_agent(cmd)
        log.debug("test_deployAsPrimaryDevice: run %s", str(reply))

        gevent.sleep(2)

        cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
        retval = self._ia_client_sim2.execute_resource(cmd)
        log.debug(
            "test_activateInstrumentSample: return from START_AUTOSAMPLE_sim2: %s",
            str(retval))

        gevent.sleep(10)

        #-------------------------------
        # Shutdown Sim1 (old instrument)
        #-------------------------------
        cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE)
        retval = self._ia_client_sim1.execute_resource(cmd)
        log.debug(
            "test_activateInstrumentSample: return from STOP_AUTOSAMPLE: %s",
            str(retval))

        log.debug("test_activateInstrumentSample: calling reset ")
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client_sim1.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: return from reset %s",
                  str(reply))
        time.sleep(5)

        #-------------------------------
        # Shutdown Sim2 (new instrument)
        #-------------------------------
        cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE)
        retval = self._ia_client_sim2.execute_resource(cmd)
        log.debug(
            "test_activateInstrumentSample: return from STOP_AUTOSAMPLE_sim2: %s",
            str(retval))

        log.debug("test_activateInstrumentSample: calling reset_sim2 ")
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client_sim2.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: return from reset_sim2 %s",
                  str(reply))
        time.sleep(5)