# Presumed imports for these examples (a sketch assuming the standard pyon/ION test layout;
# helpers such as any_old, BASE64_EGG/BASE64_ZIPFILE and the other service clients are assumed
# to come from the surrounding test modules):
from pyon.util.int_test import IonIntegrationTestCase
from pyon.public import IonObject, RT, PRED, CFG, log
from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
from interface.objects import AttachmentType


class TestResourceRegistryAttachments(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)

        print 'started services'

    def test_resource_registry_blob_sanity(self):
        resource_id, _ = self.RR.create(IonObject(RT.Resource, name="foo"))

        MY_CONTENT = "the quick brown fox etc etc etc"

        #save
        att_id = self.RR.create_attachment(
            resource_id,
            IonObject(RT.Attachment,
                      name="test.txt",
                      content=MY_CONTENT,
                      content_type="text/plain",
                      keywords=["test1", "test2"],
                      attachment_type=AttachmentType.BLOB))

        #load
        attachment = self.RR.read_attachment(att_id)
        self.assertEqual("test.txt", attachment.name)
        self.assertEqual("text/plain", attachment.content_type)
        self.assertIn("test1", attachment.keywords)
        self.assertIn("test2", attachment.keywords)

        # content round-trips: it is stored base64-encoded, but read_attachment returns it decoded,
        # so it should match what we put in
        self.assertEqual(MY_CONTENT, attachment.content)
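
# Aside (not part of the original tests): a minimal standard-library sketch of the base64 round
# trip the comment above refers to. The exact persisted form is an assumption; the test itself
# only guarantees that read_attachment hands back the same bytes that were stored.
import base64
_stored = base64.encodestring("the quick brown fox etc etc etc")          # presumed persisted form
assert base64.decodestring(_stored) == "the quick brown fox etc etc etc"  # what the caller gets back
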
class TestIMSRegisterAgentIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR  = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)

        print 'started services'


#    def test_just_the_setup(self):
#        return

    @unittest.skip('OOIION-1655 Implemented as part of R3 D041 Data Processing Management Service')
    def test_register_instrument_agent_int(self):

        #test ssh-ability
        cfg_host = CFG.service.instrument_management.driver_release_host #'amoeaba.ucsd.edu'
        cfg_user = pwd.getpwuid(os.getuid())[0]

        if "driver_release_user" in CFG.service.instrument_management:
            cfg_user = CFG.service.instrument_management.driver_release_user

        remotehost = "%s@%s" % (cfg_user, cfg_host)

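        # Pre-flight check: key-based SSH only. 'PasswordAuthentication=no' makes ssh fail instead of
        # prompting, 'StrictHostKeyChecking=no' skips the interactive host-key prompt, and the remote
        # command is just 'true' -- all we care about is whether non-interactive login works.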
        ssh_retval = subprocess.call(["ssh", "-o", "PasswordAuthentication=no",
                                      "-o", "StrictHostKeyChecking=no",
                                      remotehost, "-f", "true"],
                                    stdout=open(os.devnull),
                                    stderr=open(os.devnull))
        
        if 0 != ssh_retval:
            raise unittest.SkipTest("SSH/SCP credentials to %s didn't work" % remotehost)



        inst_agent_id = self.IMS.create_instrument_agent(any_old(RT.InstrumentAgent))
        inst_model_id = self.IMS.create_instrument_model(any_old(RT.InstrumentModel))

        self.IMS.assign_instrument_model_to_instrument_agent(inst_model_id, inst_agent_id)

        self.IMS.register_instrument_agent(inst_agent_id, BASE64_EGG, BASE64_ZIPFILE)

        attachments, _ = self.RR.find_objects(inst_agent_id, PRED.hasAttachment, RT.Attachment, True)

        self.assertEqual(len(attachments), 4)

        for a_id in attachments:

            a = self.RR.read_attachment(a_id, include_content=True)

            parts = a.name.split(".")

            # we expect a.txt to contain "aaa" and have a keyword listed called "a"
            if "txt" == parts[1]:
                self.assertEqual("text/plain", a.content_type)
                self.assertIn(parts[0], a.keywords)
                self.assertEqual(a.content, parts[0] * 3 + "\n")

            elif "text/url" == a.content_type:
                remote_url = a.content.split("URL=")[1]

                failmsg = ""
                try:
                    code = urlopen(remote_url).code
                    if 400 <= code:
                        failmsg = "HTTP code %s" % code
                except Exception as e:
                    failmsg = str(e)

                if failmsg:
                    self.fail(("Uploaded succeeded, but fetching '%s' failed with '%s'. ") %
                              (remote_url, failmsg))


        log.info("L4-CI-SA-RQ-148")
        log.info("L4-CI-SA-RQ-148: The test services shall ensure that test results are incorporated into physical resource metadata. ")
        log.info("L4-CI-SA-RQ-336: Instrument activation shall support the registration of Instrument Agents")

        # cleanup
        self.IMS.force_delete_instrument_agent(inst_agent_id)
        self.IMS.force_delete_instrument_model(inst_model_id)

        return
class TestResourceRegistryAttachments(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)

        print 'started services'

    def test_resource_registry_blob_sanity(self):
        resource_id, _ = self.RR.create(IonObject(RT.Resource, name="foo"))

        MY_CONTENT = "the quick brown fox etc etc etc"

        #save
        att_id = self.RR.create_attachment(
            resource_id,
            IonObject(RT.Attachment,
                      name="test.txt",
                      content=MY_CONTENT,
                      content_type="text/plain",
                      keywords=["test1", "test2"],
                      attachment_type=AttachmentType.BLOB))

        #load
        attachment = self.RR.read_attachment(att_id, include_content=True)
        self.assertEqual("test.txt", attachment.name)
        self.assertEqual("text/plain", attachment.content_type)
        self.assertIn("test1", attachment.keywords)
        self.assertIn("test2", attachment.keywords)

        # content round-trips: it is stored base64-encoded, but read_attachment returns it decoded,
        # so it should match what we put in
        self.assertEqual(MY_CONTENT, attachment.content)

        obj = self.RR.read(resource_id)
        self.assertEqual(obj.name, "foo")
        obj.name = "TheDudeAbides"
        obj = self.RR.update(obj)
        obj = self.RR.read(resource_id)
        self.assertEqual(obj.name, "TheDudeAbides")

        att = self.RR.find_attachments(resource_id)
        self.assertNotEqual(att, None)

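        # The resource registry models relationships as (subject, predicate, object) triples: below,
        # an ActorIdentity--hasInfo-->UserInfo association is created, then looked up from the subject
        # side (find_associations, read_object) and from the object side (find_subjects).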
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.RR.create(
            actor_identity_obj)
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.RR.create(user_info_obj)
        assoc_id, assoc_rev = self.RR.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertNotEqual(assoc_id, None)

        find_assoc = self.RR.find_associations(actor_identity_obj_id,
                                               PRED.hasInfo, user_info_obj_id)
        self.assertTrue(find_assoc[0]._id == assoc_id)
        subj = self.RR.find_subjects(RT.ActorIdentity, PRED.hasInfo,
                                     user_info_obj_id, True)

        res_obj1 = self.RR.read_object(actor_identity_obj_id, PRED.hasInfo,
                                       RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)

        self.RR.delete_association(assoc_id)
        self.RR.delete_attachment(att_id)
        self.RR.delete(resource_id)
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS  = IdentityManagementServiceClient(node=self.container.node)
        self.PSC  = PubsubManagementServiceClient(node=self.container.node)
        self.DP   = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DSC  = DatasetManagementServiceClient(node=self.container.node)
        self.PDC  = ProcessDispatcherServiceClient(node=self.container.node)

        self.RR2 = EnhancedResourceRegistryClient(self.RR)

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return

    @attr('EXT')
    def test_resources_associations_extensions(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """
        
        #stuff we control
        instrument_agent_instance_id, _ =  self.RR.create(any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ =           self.RR.create(any_old(RT.InstrumentAgent))
        instrument_model_id, _ =           self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ =          self.RR.create(any_old(RT.InstrumentDevice))
        platform_agent_instance_id, _ =    self.RR.create(any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ =             self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ =            self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ =             self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ =              self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ =               self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        data_producer_id, _      = self.RR.create(any_old(RT.DataProducer))
        org_id, _ =                self.RR.create(any_old(RT.Org))

        #instrument_agent_instance_id #is only a target
        
        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id)
        self.RR.create_association(org_id, PRED.hasResource, instrument_device_id)


        # instrument_model_id is only a target

        # platform_agent_instance_id is only a target

        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        # platform_model_id is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id)

        # sensor_model_id is only a target

        #create a parsed product for this instrument output
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            processing_level_code='Parsed_Canonical',
            temporal_domain = tdom,
            spatial_domain = sdom)
        pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1)


        def addInstOwner(inst_id, subject):

            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            user_info_id = self.IDS.create_user_info(user_id, user_info_obj)

            self.RR.create_association(inst_id, PRED.hasOwner, user_id)


        #Testing multiple instrument owners
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

        extended_instrument = self.IMS.get_instrument_device_extension(instrument_device_id)

        self.assertEqual(instrument_device_id, extended_instrument._id)
        self.assertEqual(len(extended_instrument.owners), 2)
        self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id)

        # Lifecycle
        self.assertEquals(len(extended_instrument.lcstate_transitions), 7)
        self.assertEquals(set(extended_instrument.lcstate_transitions.keys()), set(['enable', 'develop', 'deploy', 'retire', 'plan', 'integrate', 'announce']))

        # Verify that computed attributes exist for the extended instrument
        self.assertIsInstance(extended_instrument.computed.firmware_version, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.last_data_received_datetime, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.last_calibration_datetime, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue)

        self.assertIsInstance(extended_instrument.computed.power_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.communications_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.location_status_roll_up, ComputedIntValue)

        log.debug("extended_instrument.computed: %s", extended_instrument.computed)

        #check model
        inst_model_obj = self.RR.read(instrument_model_id)
        self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name)

        #check agent instance
        inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id)
        self.assertEqual(inst_agent_instance_obj.name, extended_instrument.agent_instance.name)

        #check agent
        inst_agent_obj = self.RR.read(instrument_agent_id)
        #compound assoc return list of lists so check the first element
        self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent.name)

        #check platform device
        plat_device_obj = self.RR.read(platform_device_id)
        self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name)

        extended_platform = self.IMS.get_platform_device_extension(platform_device_id)

        self.assertEqual(1, len(extended_platform.instrument_devices))
        self.assertEqual(instrument_device_id, extended_platform.instrument_devices[0]._id)
        self.assertEqual(1, len(extended_platform.instrument_models))
        self.assertEqual(instrument_model_id, extended_platform.instrument_models[0]._id)
        self.assertEquals(extended_platform.platform_agent._id, platform_agent_id)

        self.assertEquals(len(extended_platform.lcstate_transitions), 7)
        self.assertEquals(set(extended_platform.lcstate_transitions.keys()), set(['enable', 'develop', 'deploy', 'retire', 'plan', 'integrate', 'announce']))

        #check sensor devices
        self.assertEqual(1, len(extended_instrument.sensor_devices))

        #check data_product_parameters_set
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.data_product_parameters_set.status)
        self.assertTrue( 'Parsed_Canonical' in extended_instrument.computed.data_product_parameters_set.value)
        # the ctd parameters should include 'temp'
        self.assertTrue( 'temp' in extended_instrument.computed.data_product_parameters_set.value['Parsed_Canonical'])

        #none of these will work because there is no agent
        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
                         extended_instrument.computed.firmware_version.status)
#        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
#                         extended_instrument.computed.operational_state.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.power_status_roll_up.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.communications_status_roll_up.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.data_status_roll_up.status)
#        self.assertEqual(StatusType.STATUS_OK,
#                        extended_instrument.computed.data_status_roll_up.value)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.location_status_roll_up.status)

#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.recent_events.status)
#        self.assertEqual([], extended_instrument.computed.recent_events.value)


        # cleanup
        c = DotDict()
        c.resource_registry = self.RR
        self.RR2.pluck(instrument_agent_id)
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(instrument_device_id)
        self.RR2.pluck(platform_agent_id)
        self.RR2.pluck(sensor_device_id)
        self.IMS.force_delete_instrument_agent(instrument_agent_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id)
        self.IMS.force_delete_platform_agent(platform_agent_id)
        self.IMS.force_delete_platform_device(platform_device_id)
        self.IMS.force_delete_platform_model(platform_model_id)
        self.IMS.force_delete_sensor_device(sensor_device_id)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #stuff we associate to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)



    def test_custom_attributes(self):
        """
        Test assignment of custom attributes
        """

        instrument_model_id, _ =           self.RR.create(any_old(RT.InstrumentModel,
                {"custom_attributes":
                         {"favorite_color": "attr desc goes here"}
            }))
        instrument_device_id, _ =          self.RR.create(any_old(RT.InstrumentDevice,
                {"custom_attributes":
                         {"favorite_color": "red",
                          "bogus_attr": "should raise warning"
                     }
            }))

        self.IMS.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)

        # cleanup
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)






    def _get_datastore(self, dataset_id):
        dataset = self.DSC.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore




    def test_resource_state_save_restore(self):

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict', records_per_granule=2, granule_publish_rate=5 )
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg",
                                  stream_configurations = [raw_config, parsed_config] )
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug( 'new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 '
        + '(SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345" )
        instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
                  instDevice_id)

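        # Port agent wiring (values pulled from CFG): device_addr/device_port point at the instrument
        # itself, command_port/data_port are the local port agent's own sockets, and the remaining
        # keys select the UNIX 'port_agent' binary talking to the device over Ethernet.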
        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)


        instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                               instAgent_id,
                                                                               instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)


        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------
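        # One DataProduct per stream: 'parsed' and 'raw' each get their own stream definition, are
        # attached to the device via DAMS.assign_data_product, and have persistence activated.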

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id1 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=parsed_stream_def_id)
                                                       
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)



        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug( 'Data product streams1 = %s', stream_ids)

        # Retrieve the id of the Dataset for the out Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]
        #create the datastore at the beginning of each int test that persists data



        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id2 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=raw_stream_def_id)
        log.debug( 'new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)


        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
        gate = ProcessStateGate(self.PDC.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" %
                                        instance_obj.agent_process_id)


        # take snapshot of config
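        # save_resource_state stores the serialized agent-instance configuration as an attachment on
        # the device; restore_resource_state below re-applies it, undoing the "BAD_DATA" edit.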
        snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)
        print "Saved config:"
        print snap_obj.content

        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.restore_resource_state(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)
        self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])

        
        self.DP.delete_data_product(data_product_id1)
        self.DP.delete_data_product(data_product_id2)



    def test_agent_instance_config(self):
        """
        Verify that agent configurations are being built properly
        """
        clients = DotDict()
        clients.resource_registry  = self.RR
        clients.pubsub_management  = self.PSC
        clients.dataset_management = self.DSC
        pconfig_builder = PlatformAgentConfigurationBuilder(clients)
        iconfig_builder = InstrumentAgentConfigurationBuilder(clients)
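        # Builder usage exercised below: point a builder at an agent-instance resource via
        # set_agent_instance_object(), then prepare(will_launch=False) assembles and returns the
        # agent config dict without actually launching an agent process.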


        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        org_obj = any_old(RT.Org)
        org_id = self.RR2.create(org_obj)

        inst_startup_config = {'startup': 'config'}

        generic_alerts_config = {'lvl1': {'lvl2': 'lvl3val'}}

        required_config_keys = [
            'org_name',
            'device_type',
            'agent',
            'driver_config',
            'stream_config',
            'startup_config',
            'aparam_alert_config',
            'children']
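
        # Every config produced by the builders must carry these keys; the verify_* helpers below
        # assert them, plus the per-role specifics (driver_config contents, children, startup_config).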



        def verify_instrument_config(config, device_id):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.name, config['org_name'])
            self.assertEqual(RT.InstrumentDevice, config['device_type'])
            self.assertIn('driver_config', config)
            driver_config = config['driver_config']
            expected_driver_fields = {'process_type': ('ZMQPyClassDriverLauncher',),
                                      }
            for k, v in expected_driver_fields.iteritems():
                self.assertIn(k, driver_config)
                self.assertEqual(v, driver_config[k])

            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertEqual(inst_startup_config, config['startup_config'])
            self.assertIn('aparam_alert_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alert_config'])
            self.assertIn('stream_config', config)
            for key in ['children']:
                self.assertEqual({}, config[key])


        def verify_child_config(config, device_id, inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.name, config['org_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertIn('aparam_alert_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alert_config'])
            self.assertIn('stream_config', config)
            self.assertIn('driver_config', config)
            self.assertIn('foo', config['driver_config'])
            self.assertEqual('bar', config['driver_config']['foo'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])

            if None is inst_device_id:
                for key in ['children', 'startup_config']:
                    self.assertEqual({}, config[key])
            else:
                for key in ['startup_config']:
                    self.assertEqual({}, config[key])

                self.assertIn(inst_device_id, config['children'])
                verify_instrument_config(config['children'][inst_device_id], inst_device_id)


        def verify_parent_config(config, parent_device_id, child_device_id, inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.name, config['org_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])
            self.assertEqual({'resource_id': parent_device_id}, config['agent'])
            self.assertIn('aparam_alert_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alert_config'])
            self.assertIn('stream_config', config)
            for key in ['startup_config']:
                self.assertEqual({}, config[key])

            self.assertIn(child_device_id, config['children'])
            verify_child_config(config['children'][child_device_id], child_device_id, inst_device_id)






        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)
        #todo: create org and figure out which agent resource needs to get assigned to it


        def _make_platform_agent_structure(agent_config=None):
            if None is agent_config: agent_config = {}

            # instance creation
            platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {'driver_config': {'foo': 'bar'},
                                                                             'alerts': generic_alerts_config})
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
            platform_agent_obj = any_old(RT.PlatformAgent, {"stream_configurations":[raw_config]})
            platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device(platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance(platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, org_id)

            return platform_agent_instance_id, platform_agent_id, platform_device_id


        def _make_instrument_agent_structure(agent_config=None):
            if None is agent_config: agent_config = {}

            # instance creation
            instrument_agent_instance_obj = any_old(RT.InstrumentAgentInstance, {"startup_config": inst_startup_config,
                                                                                 'alerts': generic_alerts_config})
            instrument_agent_instance_obj.agent_config = agent_config
            instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw',
                                             parameter_dictionary_name='ctd_raw_param_dict',
                                             records_per_granule=2,
                                             granule_publish_rate=5 )
            instrument_agent_obj = any_old(RT.InstrumentAgent, {"stream_configurations":[raw_config]})
            instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj)

            # device creation
            instrument_device_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_instrument_agent_instance_to_instrument_device(instrument_agent_instance_id, instrument_device_id)
            self.RR2.assign_instrument_agent_to_instrument_agent_instance(instrument_agent_id, instrument_agent_instance_id)
            self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id)

            return instrument_agent_instance_id, instrument_agent_id, instrument_device_id



        # can't do anything without an agent instance obj
        log.debug("Testing that preparing a launcher without agent instance raises an error")
        self.assertRaises(AssertionError, pconfig_builder.prepare, will_launch=False)

        log.debug("Making the structure for a platform agent, which will be the child")
        platform_agent_instance_child_id, _, platform_device_child_id  = _make_platform_agent_structure()
        platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id)

        log.debug("Preparing a valid agent instance launch, for config only")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_child_obj)
        child_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(child_config, platform_device_child_id)


        log.debug("Making the structure for a platform agent, which will be the parent")
        platform_agent_instance_parent_id, _, platform_device_parent_id  = _make_platform_agent_structure()
        platform_agent_instance_parent_obj = self.RR2.read(platform_agent_instance_parent_id)

        log.debug("Testing child-less parent as a child config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(parent_config, platform_device_parent_id)

        log.debug("assigning child platform to parent")
        self.RR2.assign_platform_device_to_platform_device(platform_device_child_id, platform_device_parent_id)
        child_device_ids = self.RR2.find_platform_device_ids_of_device(platform_device_parent_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing parent + child as parent config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(parent_config, platform_device_parent_id, platform_device_child_id)


        log.debug("making the structure for an instrument agent")
        instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure()
        instrument_agent_instance_obj = self.RR2.read(instrument_agent_instance_id)

        log.debug("Testing instrument config")
        iconfig_builder.set_agent_instance_object(instrument_agent_instance_obj)
        instrument_config = iconfig_builder.prepare(will_launch=False)
        verify_instrument_config(instrument_config, instrument_device_id)

        log.debug("assigning instrument to platform")
        self.RR2.assign_instrument_device_to_platform_device(instrument_device_id, platform_device_child_id)
        child_device_ids = self.RR2.find_instrument_device_ids_of_device(platform_device_child_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing entire config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        full_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(full_config, platform_device_parent_id, platform_device_child_id, instrument_device_id)
class TestInstrumentManagementServiceAgents(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR  = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)

        print 'started services'

    @unittest.skip('this test just for debugging setup')
    def test_just_the_setup(self):
        return

    def test_register_instrument_agent(self):

        #test ssh-ability
        cfg_host = CFG.service.instrument_management.driver_release_host #'amoeaba.ucsd.edu'
        cfg_user = pwd.getpwuid(os.getuid())[0]

        remotehost = "%s@%s" % (cfg_user, cfg_host)

        ssh_retval = subprocess.call(["ssh", "-o", "PasswordAuthentication=no", 
                                      remotehost, "-f", "true"])
        
        if 0 != ssh_retval:
            raise unittest.SkipTest("SSH/SCP credentials to %s didn't work" % remotehost)



        inst_agent_id = self.IMS.create_instrument_agent(any_old(RT.InstrumentAgent))
        inst_model_id = self.IMS.create_instrument_model(any_old(RT.InstrumentModel))

        self.IMS.assign_instrument_model_to_instrument_agent(inst_model_id, inst_agent_id)

        self.IMS.register_instrument_agent(inst_agent_id, BASE64_EGG, BASE64_ZIPFILE)

        attachments, _ = self.RR.find_objects(inst_agent_id, PRED.hasAttachment, RT.Attachment, True)

        self.assertEqual(len(attachments), 4)

        for a_id in attachments:

            a = self.RR.read_attachment(a_id)

            parts = a.name.split(".")
            
            if "txt" == parts[1]:
                self.assertEqual("text/plain", a.content_type)
                self.assertIn(parts[0], a.keywords)
                self.assertEqual(a.content, (parts[0] * 3) + "\n")

        log.info("L4-CI-SA-RQ-148")

        return
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS  = IdentityManagementServiceClient(node=self.container.node)
        self.PSC  = PubsubManagementServiceClient(node=self.container.node)
        self.DP   = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DSC  = DatasetManagementServiceClient(node=self.container.node)
        self.PDC  = ProcessDispatcherServiceClient(node=self.container.node)

        self.RR2 = EnhancedResourceRegistryClient(self.RR)

        print 'started services'

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return

    @attr('EXT')
    def test_resources_associations_extensions(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """
        
        #stuff we control
        instrument_agent_instance_id, _ =  self.RR.create(any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ =           self.RR.create(any_old(RT.InstrumentAgent))
        instrument_model_id, _ =           self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ =          self.RR.create(any_old(RT.InstrumentDevice))
        platform_agent_instance_id, _ =    self.RR.create(any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ =             self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ =            self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ =             self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ =              self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ =               self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        data_producer_id, _      = self.RR.create(any_old(RT.DataProducer))
        org_id, _ =                self.RR.create(any_old(RT.Org))

        #instrument_agent_instance_id #is only a target
        
        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id)
        self.RR.create_association(org_id, PRED.hasResource, instrument_device_id)


        # instrument_model_id is only a target

        # platform_agent_instance_id is only a target

        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        # platform_model_id is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id)

        # sensor_model_id is only a target

        #create a parsed product for this instrument output
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            processing_level_code='Parsed_Canonical',
            temporal_domain = tdom,
            spatial_domain = sdom)
        pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1)


        def addInstOwner(inst_id, subject):

            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            user_info_id = self.IDS.create_user_info(user_id, user_info_obj)

            self.RR.create_association(inst_id, PRED.hasOwner, user_id)


        #Testing multiple instrument owners
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

        extended_instrument = self.IMS.get_instrument_device_extension(instrument_device_id)

        self.assertEqual(instrument_device_id, extended_instrument._id)
        self.assertEqual(len(extended_instrument.owners), 2)
        self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id)

        # Verify that computed attributes exist for the extended instrument
        self.assertIsInstance(extended_instrument.computed.firmware_version, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.last_data_received_datetime, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.last_calibration_datetime, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue)

        self.assertIsInstance(extended_instrument.computed.power_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.communications_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.location_status_roll_up, ComputedIntValue)

        log.debug("extended_instrument.computed: %s", extended_instrument.computed)

        #check model
        inst_model_obj = self.RR.read(instrument_model_id)
        self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name)

        #check agent instance
        inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id)
        self.assertEqual(inst_agent_instance_obj.name, extended_instrument.agent_instance.name)

        #check agent
        inst_agent_obj = self.RR.read(instrument_agent_id)
        #compound assoc return list of lists so check the first element
        self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent[0].name)

        #check platform device
        plat_device_obj = self.RR.read(platform_device_id)
        self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name)

        extended_platform = self.IMS.get_platform_device_extension(platform_device_id)

        self.assertEqual(1, len(extended_platform.instrument_devices))
        self.assertEqual(instrument_device_id, extended_platform.instrument_devices[0]._id)
        self.assertEqual(1, len(extended_platform.instrument_models))
        self.assertEqual(instrument_model_id, extended_platform.instrument_models[0]._id)

        #check sensor devices
        self.assertEqual(1, len(extended_instrument.sensor_devices))

        #check data_product_parameters_set
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.data_product_parameters_set.status)
        self.assertTrue( 'Parsed_Canonical' in extended_instrument.computed.data_product_parameters_set.value)
        # the ctd parameters should include 'temp'
        self.assertTrue( 'temp' in extended_instrument.computed.data_product_parameters_set.value['Parsed_Canonical'])

        #none of these will work because there is no agent
        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
                         extended_instrument.computed.firmware_version.status)
#        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
#                         extended_instrument.computed.operational_state.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.power_status_roll_up.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.communications_status_roll_up.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.data_status_roll_up.status)
#        self.assertEqual(StatusType.STATUS_OK,
#                        extended_instrument.computed.data_status_roll_up.value)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.location_status_roll_up.status)

#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.recent_events.status)
#        self.assertEqual([], extended_instrument.computed.recent_events.value)


        # cleanup
        self.RR2.pluck(instrument_agent_id)
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(instrument_device_id)
        self.RR2.pluck(platform_agent_id)
        self.IMS.force_delete_instrument_agent(instrument_agent_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id)
        self.IMS.force_delete_platform_agent(platform_agent_id)
        self.IMS.force_delete_platform_device(platform_device_id)
        self.IMS.force_delete_platform_model(platform_model_id)
        self.IMS.force_delete_sensor_device(sensor_device_id)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #stuff we associate to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)



    def test_custom_attributes(self):
        """
        Test assignment of custom attributes
        """

        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel,
            {"custom_attributes":
                 {"favorite_color": "attr desc goes here"}}))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice,
            {"custom_attributes":
                 {"favorite_color": "red",
                  "bogus_attr": "should raise warning"}}))

        self.IMS.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)

        # cleanup
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)






    def _get_datastore(self, dataset_id):
        dataset = self.DSC.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore




    def test_resource_state_save_restore(self):

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict', records_per_granule=2, granule_publish_rate=5 )
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg",
                                  stream_configurations = [raw_config, parsed_config] )
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug( 'new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_resource_state_save_restore: Create instrument resource to represent the SBE37 '
                  '(SA Req: L4-CI-SA-RQ-241)')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345" )
        instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
                  instDevice_id)

        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)


        instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                               instAgent_id,
                                                                               instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
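        # the domain objects are serialized to plain dicts so they can be stored on the
        # DataProduct resources created below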


        spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)


        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id1 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=parsed_stream_def_id)
                                                       
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)



        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug( 'Data product streams1 = %s', stream_ids)

        # Retrieve the id of the Dataset associated with the output Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]
        #create the datastore at the beginning of each int test that persists data



        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id2 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=raw_stream_def_id)
        log.debug( 'new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)


        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
        gate = ProcessStateGate(self.PDC.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" %
                                        instance_obj.agent_process_id)


        # take snapshot of config
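        # save_resource_state stores the snapshot as an Attachment on the device resource,
        # so the returned id can be read back via read_attachment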
        snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)
        print "Saved config:"
        print snap_obj.content

        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.restore_resource_state(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)
        self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])

        
        self.DP.delete_data_product(data_product_id1)
        self.DP.delete_data_product(data_product_id2)



    def test_agent_instance_config(self):
        """
        Verify that agent configurations are being built properly
        """
        clients = DotDict()
        clients.resource_registry  = self.RR
        clients.pubsub_management  = self.PSC
        clients.dataset_management = self.DSC
        pconfig_builder = PlatformAgentConfigurationBuilder(clients)
        iconfig_builder = InstrumentAgentConfigurationBuilder(clients)
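        # both builders draw on the same service clients to assemble the agent config
        # dicts that are verified by the helper functions defined below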


        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        org_id = self.RR2.create(any_old(RT.Org))

        inst_startup_config = {'startup': 'config'}

        required_config_keys = [
            'org_name',
            'device_type',
            'agent',
            'driver_config',
            'stream_config',
            'startup_config',
            'alarm_defs',
            'children']



        def verify_instrument_config(config, device_id):
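            """Check an instrument agent config: required keys, org name, device type,
            driver/agent/startup/stream config entries, and empty alarm_defs/children."""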
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual('Org_1', config['org_name'])
            self.assertEqual(RT.InstrumentDevice, config['device_type'])
            self.assertIn('driver_config', config)
            driver_config = config['driver_config']
            expected_driver_fields = {'process_type': ('ZMQPyClassDriverLauncher',)}
            for k, v in expected_driver_fields.iteritems():
                self.assertIn(k, driver_config)
                self.assertEqual(v, driver_config[k])

            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertEqual(inst_startup_config, config['startup_config'])
            self.assertIn('stream_config', config)
            for key in ['alarm_defs', 'children']:
                self.assertEqual({}, config[key])


        def verify_child_config(config, device_id, inst_device_id=None):
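            """Check a platform agent config in its role as a child; when inst_device_id
            is given, also verify the nested instrument config under 'children'."""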
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual('Org_1', config['org_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertEqual({'process_type': ('ZMQPyClassDriverLauncher',)}, config['driver_config'])
            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertIn('stream_config', config)

            if None is inst_device_id:
                for key in ['alarm_defs', 'children', 'startup_config']:
                    self.assertEqual({}, config[key])
            else:
                for key in ['alarm_defs', 'startup_config']:
                    self.assertEqual({}, config[key])

                self.assertIn(inst_device_id, config['children'])
                verify_instrument_config(config['children'][inst_device_id], inst_device_id)


        def verify_parent_config(config, parent_device_id, child_device_id, inst_device_id=None):
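            """Check a top-level platform agent config and recurse into its child
            platform (and optional instrument) configs."""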
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual('Org_1', config['org_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertEqual({'process_type': ('ZMQPyClassDriverLauncher',)}, config['driver_config'])
            self.assertEqual({'resource_id': parent_device_id}, config['agent'])
            self.assertIn('stream_config', config)
            for key in ['alarm_defs', 'startup_config']:
                self.assertEqual({}, config[key])

            self.assertIn(child_device_id, config['children'])
            verify_child_config(config['children'][child_device_id], child_device_id, inst_device_id)






        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)
        #todo: create org and figure out which agent resource needs to get assigned to it


        def _make_platform_agent_structure(agent_config=None):
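            """Create a PlatformAgentInstance, PlatformAgent, PlatformDevice and a raw
            DataProduct, wire up their associations, and return the three resource ids."""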
            if None is agent_config: agent_config = {}

            # instance creation
            platform_agent_instance_obj = any_old(RT.PlatformAgentInstance)
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
            platform_agent_obj = any_old(RT.PlatformAgent, {"stream_configurations":[raw_config]})
            platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device(platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance(platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, org_id)

            return platform_agent_instance_id, platform_agent_id, platform_device_id


        def _make_instrument_agent_structure(agent_config=None):
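            """Create an InstrumentAgentInstance, InstrumentAgent, InstrumentDevice and a
            raw DataProduct, wire up their associations, and return the three resource ids."""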
            if None is agent_config: agent_config = {}

            # instance creation
            instrument_agent_instance_obj = any_old(RT.InstrumentAgentInstance, {"startup_config": inst_startup_config})
            instrument_agent_instance_obj.agent_config = agent_config
            instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
            instrument_agent_obj = any_old(RT.InstrumentAgent, {"stream_configurations":[raw_config]})
            instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj)

            # device creation
            instrument_device_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_instrument_agent_instance_to_instrument_device(instrument_agent_instance_id, instrument_device_id)
            self.RR2.assign_instrument_agent_to_instrument_agent_instance(instrument_agent_id, instrument_agent_instance_id)
            self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id)

            return instrument_agent_instance_id, instrument_agent_id, instrument_device_id



        # can't do anything without an agent instance obj
        log.debug("Testing that preparing a launcher without agent instance raises an error")
        self.assertRaises(AssertionError, pconfig_builder.prepare, will_launch=False)

        log.debug("Making the structure for a platform agent, which will be the child")
        platform_agent_instance_child_id, _, platform_device_child_id  = _make_platform_agent_structure()
        platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id)

        log.debug("Preparing a valid agent instance launch, for config only")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_child_obj)
        child_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(child_config, platform_device_child_id)


        log.debug("Making the structure for a platform agent, which will be the parent")
        platform_agent_instance_parent_id, _, platform_device_parent_id  = _make_platform_agent_structure()
        platform_agent_instance_parent_obj = self.RR2.read(platform_agent_instance_parent_id)

        log.debug("Testing child-less parent as a child config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(parent_config, platform_device_parent_id)

        log.debug("assigning child platform to parent")
        self.RR2.assign_platform_device_to_platform_device(platform_device_child_id, platform_device_parent_id)
        child_device_ids = self.RR2.find_platform_device_ids_of_device(platform_device_parent_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing parent + child as parent config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(parent_config, platform_device_parent_id, platform_device_child_id)


        log.debug("making the structure for an instrument agent")
        instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure()
        instrument_agent_instance_obj = self.RR2.read(instrument_agent_instance_id)

        log.debug("Testing instrument config")
        iconfig_builder.set_agent_instance_object(instrument_agent_instance_obj)
        instrument_config = iconfig_builder.prepare(will_launch=False)
        verify_instrument_config(instrument_config, instrument_device_id)

        log.debug("assigning instrument to platform")
        self.RR2.assign_instrument_device_to_platform_device(instrument_device_id, platform_device_child_id)
        child_device_ids = self.RR2.find_instrument_device_ids_of_device(platform_device_child_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing entire config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        full_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(full_config, platform_device_parent_id, platform_device_child_id, instrument_device_id)

        #self.fail(parent_config)
        #plauncher.prepare(will_launch=False)


    def sample_nested_platform_agent_instance_config(self):
        """
        for informational purposes
        """

        ret = {'org_name': 'Org_1',
               'alarm_defs': {},
               'driver_config': {'process_type': ('ZMQPyClassDriverLauncher',)},
               'stream_config': {'parameter_dictionary': 'lots of stuff'},
               'agent': {'resource_id': '33e54106c4444444862da082098bc123'},
               'startup_config': {},
               'device_type': 'PlatformDevice',
               'children': {'76a39596eeff4fd5b409c4cb93f0e581':
                                    {'org_name': 'Org_1',
                                     'alarm_defs': {},
                                     'driver_config': {'process_type': ('ZMQPyClassDriverLauncher',)},
                                     'stream_config': {'parameter_dictionary': 'lots of stuff'},
                                     'agent': {'resource_id': '76a39596eeff4fd5b409c4cb93f0e581'},
                                     'startup_config': {},
                                     'device_type': 'PlatformDevice',
                                     'children': {}}}}

        return ret


class TestResourceRegistry(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2coi.yml")

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient(node=self.container.node)

    def test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object not based on most current version"))

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name="user")
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name="instrument")
        iid, _ = self.resource_registry_service.create(inst, headers={"ion-actor-id": str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def test_lifecycle(self):
        att = IonObject("InstrumentDevice", name="mine", description="desc")

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT_PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN)
        self.assertEquals(new_state, LCS.PLANNED_PRIVATE)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED_PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message
        )

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP)
        self.assertEquals(new_state, LCS.DEVELOPED_PRIVATE)

        self.assertRaises(
            iex.BadRequest,
            self.resource_registry_service.execute_lifecycle_transition,
            resource_id=rid,
            transition_event="NONE##",
        )

        self.resource_registry_service.set_lifecycle_state(rid, LCS.INTEGRATED_PRIVATE)
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED_PRIVATE)

    def test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "bogustype"
            )
        self.assertTrue(cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Subject id or rev not available")

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id or rev not available")

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id
            )
        self.assertTrue(cm.exception.message == "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id
        )

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True
        )
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True
        )
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Create a similar association to a specific revision
        # TODO: This is not a supported case so far
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R"
        )

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False
        )
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj
        )
        ret2 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None, True
        )
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True
        )
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True
        )
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True
        )
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Delete one of the associations
        self.resource_registry_service.delete_association(assoc_id2)

        assoc = self.resource_registry_service.get_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("InstrumentDevice", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentDevice, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentDevice, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)


    def test_attach(self):
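        # exercises attachment create/read/find/delete using a small in-memory PNG
        # as the binary payload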

        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Create an instrument resource to attach to
        instrument = IonObject("InstrumentDevice", name="instrument")
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1)
        self.assertEquals(binary, att1.content)

        import base64

        att = Attachment(content=base64.encodestring(binary), attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid, id_only=False, limit=1)
        self.assertEquals(atts[0].content, att1.content)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [])


class TestResourceRegistry(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient()

    @unittest.skip('Represents a bug in storage/retrieval')
    def test_tuple_in_dict(self):
        # create a resource with a tuple saved in a dict
        transform_obj = IonObject(RT.Transform)
        transform_obj.configuration = {}
        transform_obj.configuration["tuple"] = ('STRING', )
        transform_id, _ = self.resource_registry_service.create(transform_obj)

        # read the resource back
        returned_transform_obj = self.resource_registry_service.read(
            transform_id)

        self.assertEqual(transform_obj.configuration["tuple"],
                         returned_transform_obj.configuration["tuple"])

    def test_basics(self):
        # Sequence all the tests so that we can save numerous system start and stops
        self._do_test_crud()
        self._do_test_read_mult()
        self._do_test_lifecycle()
        self._do_test_attach()
        self._do_test_association()
        self._do_test_find_resources()
        self._do_test_find_objects_mult()

    def _do_test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(
            cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(
            inst, headers={'ion-actor-id': str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid,
                                                             PRED.hasOwner,
                                                             RT.ActorIdentity,
                                                             id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def _do_test_read_mult(self):
        test_resource1_id, _ = self.resource_registry_service.create(
            Resource(name='test1'))
        test_resource2_id, _ = self.resource_registry_service.create(
            Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o, Resource)
            self.assertTrue(o._id in res_list)

    def _do_test_lifecycle(self):
        # Lifecycle tests
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT)
        self.assertEquals(att1.availability, AS.PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.PLAN)
        self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE))

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)
        self.assertEquals(att2.availability, AS.PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(
                rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.DEVELOP)
        self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE))

        with self.assertRaises(BadRequest):
            self.resource_registry_service.execute_lifecycle_transition(
                resource_id=rid, transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(
            rid, lcstate(LCS.INTEGRATED, AS.PRIVATE))
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED)
        self.assertEquals(att1.availability, AS.PRIVATE)

    def _do_test_attach(self):
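        # same attachment round-trip as test_attach above, but reads content back with
        # include_content=True and exercises ordered/limited find_attachments queries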
        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Create an instrument resource to attach to
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(
            aid1, include_content=True)
        self.assertEquals(binary, att1.content)

        import base64
        att = Attachment(content=base64.encodestring(binary),
                         attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(
            aid2, include_content=True)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(
            iid, id_only=False, include_content=True, limit=1)
        self.assertEquals(atts[0].content, binary)
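        # find_attachments returns attachments in creation order by default;
        # descending=True reverses that order, limit caps the result count, and
        # include_content=True also pulls back the attachment bodies.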

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [])

    def _do_test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(
            actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(
            actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(
            user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(
            user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Subject (id or object) not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Object (id or object) not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                "bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message.startswith("Subject id"))

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message.startswith("Object id"))

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message ==
                        "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(
            cm.exception.message ==
            "Illegal object type ActorIdentity for predicate hasInfo")

        # Create an association between the subject and object
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)
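        # read_object/read_subject return the full resource object by default;
        # with id_only=True they return just the resource id string.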

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations using the resource objects instead of ids (good cases)
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None,
            True)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)
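        # find_subjects/find_objects return a (matches, associations) pair, so
        # index [0] is the list of matching resources (or ids) and [1] holds
        # the corresponding associations.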

        subj_ret3 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(
            RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(
            RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        assoc = self.resource_registry_service.get_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Delete the association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def _do_test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(
                RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(
            cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(
            RT.UserInfo, None, "name", False)
        self.assertEquals(len(ret[0]), 0)

        # Instantiate an object
        obj = IonObject("InstrumentAgentInstance", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentAgentInstance, None, "name", False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

    def _do_test_find_objects_mult(self):
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(
            subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(
            subject=transform_id,
            object=pd_id,
            predicate=PRED.hasProcessDefinition)

        results, _ = self.resource_registry_service.find_objects_mult(
            subjects=[dp_id], id_only=True)
        self.assertTrue(results == [transform_id])
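        # find_objects_mult fans out over several subjects in a single call;
        # with more than one subject the result order is apparently not
        # guaranteed, hence the sort before comparing below.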

        results, _ = self.resource_registry_service.find_objects_mult(
            subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @attr('EXT')
    def test_get_resource_extension(self):

        #Testing multiple instrument owners
        subject1 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        actor_identity_obj1 = IonObject(RT.ActorIdentity, {"name": subject1})
        actor_id1, _ = self.resource_registry_service.create(
            actor_identity_obj1)

        user_info_obj1 = IonObject(RT.UserInfo, {"name": "Foo"})
        user_info_id1, _ = self.resource_registry_service.create(
            user_info_obj1)
        self.resource_registry_service.create_association(
            actor_id1, PRED.hasInfo, user_info_id1)

        subject2 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256"

        actor_identity_obj2 = IonObject(RT.ActorIdentity, {"name": subject2})
        actor_id2, _ = self.resource_registry_service.create(
            actor_identity_obj2)

        user_info_obj2 = IonObject(RT.UserInfo, {"name": "Foo2"})
        user_info_id2, _ = self.resource_registry_service.create(
            user_info_obj2)
        self.resource_registry_service.create_association(
            actor_id2, PRED.hasInfo, user_info_id2)

        test_obj = IonObject(RT.InformationResource, {"name": "TestResource"})
        test_obj_id, _ = self.resource_registry_service.create(test_obj)
        self.resource_registry_service.create_association(
            test_obj_id, PRED.hasOwner, actor_id1)
        self.resource_registry_service.create_association(
            test_obj_id, PRED.hasOwner, actor_id2)

        extended_resource = self.resource_registry_service.get_resource_extension(
            test_obj_id, OT.ExtendedInformationResource)

        self.assertEqual(test_obj_id, extended_resource._id)
        self.assertEqual(len(extended_resource.owners), 2)

        extended_resource_list = self.resource_registry_service.get_resource_extension(
            str([user_info_id1, user_info_id2]),
            OT.ExtendedInformationResource)
        self.assertEqual(len(extended_resource_list), 2)
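        # Passing a stringified list of resource ids (note the str([...]))
        # makes get_resource_extension return one extended resource per id
        # rather than a single object.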

        optional_args = {'user_id': user_info_id1}
        extended_resource = self.resource_registry_service.get_resource_extension(
            test_obj_id,
            OT.TestExtendedInformationResource,
            optional_args=optional_args)

        self.assertEqual(test_obj_id, extended_resource._id)
        self.assertEqual(len(extended_resource.owners), 2)
        self.assertEqual(extended_resource.user_id, user_info_id1)

    @attr('PREP')
    def test_prepare_resource_support(self):

        prepare_data = self.resource_registry_service.prepare_resource_support(
            resource_type=RT.StreamDefinition)

        self.assertEqual(prepare_data.create_request.service_name,
                         "resource_registry")
        self.assertEqual(prepare_data.create_request.service_operation,
                         "create")
        self.assertEqual(prepare_data.create_request.request_parameters,
                         {"object": "$(object)"})

        self.assertEqual(prepare_data.update_request.service_name,
                         "resource_registry")
        self.assertEqual(prepare_data.update_request.service_operation,
                         "update")
        self.assertEqual(prepare_data.update_request.request_parameters,
                         {"object": "$(object)"})

        res_id, _ = self.resource_registry_service.create(
            prepare_data.resource)

        prepare_data = self.resource_registry_service.prepare_resource_support(
            resource_type=RT.StreamDefinition, resource_id=res_id)

        prepare_data.resource.name = "test_stream_def"
        prepare_data.resource.stream_type = "test_type"

        stream_def_id, _ = self.resource_registry_service.update(
            prepare_data.resource)

        #def ion_object_encoder(obj):
        #    return obj.__dict__

        #print simplejson.dumps(prepare_data, default=ion_object_encoder, indent=2)

        stream_def = self.resource_registry_service.read(stream_def_id)

        self.assertEqual(stream_def.name, prepare_data.resource.name)
        self.assertEqual(stream_def.stream_type,
                         prepare_data.resource.stream_type)
class TestIMSRegisterAgentIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR  = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)

        print 'started services'

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return

    def test_register_instrument_agent_int(self):

        #test ssh-ability
        cfg_host = CFG.service.instrument_management.driver_release_host #'amoeaba.ucsd.edu'
        cfg_user = pwd.getpwuid(os.getuid())[0]

        if "driver_release_user" in CFG.service.instrument_management:
            cfg_user = CFG.service.instrument_management.driver_release_user

        remotehost = "%s@%s" % (cfg_user, cfg_host)

        ssh_retval = subprocess.call(["ssh", "-o", "PasswordAuthentication=no",
                                      "-o", "StrictHostKeyChecking=no",
                                      remotehost, "-f", "true"],
                                     stdout=open(os.devnull, "w"),
                                     stderr=open(os.devnull, "w"))

        if 0 != ssh_retval:
            raise unittest.SkipTest("SSH/SCP credentials to %s didn't work" % remotehost)
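        # (register_instrument_agent below presumably pushes the egg/zipfile to
        # the release host, so bail out early if passwordless SSH is unavailable)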



        inst_agent_id = self.IMS.create_instrument_agent(any_old(RT.InstrumentAgent))
        inst_model_id = self.IMS.create_instrument_model(any_old(RT.InstrumentModel))

        self.IMS.assign_instrument_model_to_instrument_agent(inst_model_id, inst_agent_id)

        self.IMS.register_instrument_agent(inst_agent_id, BASE64_EGG, BASE64_ZIPFILE)

        attachments, _ = self.RR.find_objects(inst_agent_id, PRED.hasAttachment, RT.Attachment, True)

        self.assertEqual(len(attachments), 4)

        for a_id in attachments:

            a = self.RR.read_attachment(a_id, include_content=True)

            parts = string.split(a.name, ".")

            # we expect a.txt to contain "aaa" and have a keyword listed called "a"
            if "txt" == parts[1]:
                self.assertEqual("text/plain", a.content_type)
                self.assertIn(parts[0], a.keywords)
                self.assertEqual(a.content, str(parts[0] * 3) + "\n")

            elif "text/url" == a.content_type:
                remote_url = a.content.split("URL=")[1]

                failmsg = ""
                try:
                    code = urlopen(remote_url).code
                    if 400 <= code:
                        failmsg = "HTTP code %s" % code
                except Exception as e:
                    failmsg = str(e)

                if failmsg:
                    self.fail("Upload succeeded, but fetching '%s' failed with '%s'" %
                              (remote_url, failmsg))


        log.info("L4-CI-SA-RQ-148")
        log.info("L4-CI-SA-RQ-148: The test services shall ensure that test results are incorporated into physical resource metadata. ")
        log.info("L4-CI-SA-RQ-336: Instrument activation shall support the registration of Instrument Agents")

        # cleanup
        self.IMS.force_delete_instrument_agent(inst_agent_id)
        self.IMS.force_delete_instrument_model(inst_model_id)

        return
class TestAgentLaunchOps(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'
        unittest  # suppress a PyCharm inspector error if all unittest.skip references are commented out

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS = IdentityManagementServiceClient(node=self.container.node)
        self.PSC = PubsubManagementServiceClient(node=self.container.node)
        self.DP = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.DSC = DatasetManagementServiceClient(node=self.container.node)
        self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return

    def test_get_agent_client_noprocess(self):
        inst_device_id = self.RR2.create(any_old(RT.InstrumentDevice))

        iap = ResourceAgentClient._get_agent_process_id(inst_device_id)

        # should be no running agent
        self.assertIsNone(iap)

        # should raise NotFound
        self.assertRaises(NotFound, ResourceAgentClient, inst_device_id)

    def test_resource_state_save_restore(self):

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        raw_config = StreamConfiguration(
            stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict')
        parsed_config = StreamConfiguration(
            stream_name='parsed',
            parameter_dictionary_name='ctd_parsed_param_dict')
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri=DRV_URI_GOOD,
            stream_configurations=[raw_config, parsed_config])
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            'test_activateInstrumentSample: Create instrument resource to represent the SBE37 '
            + '(SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        instDevice_id = self.IMS.create_instrument_device(
            instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(
            instModel_id, instDevice_id)

        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
            instDevice_id)

        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }
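        # (port agent settings above: device_addr/device_port point at the
        # simulated SBE37, while command_port/data_port are the local ports the
        # UNIX port agent exposes; the address/port values come from CFG.device.sbe37)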

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config)

        instAgentInstance_id = self.IMS.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        spdict_id = self.DSC.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(
            name='parsed', parameter_dictionary_id=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name(
            'ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(
            name='raw', parameter_dictionary_id=rpdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test')

        data_product_id1 = self.DP.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)

        log.debug('new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id,
                                      data_product_id=data_product_id1)
        self.DP.activate_data_product_persistence(
            data_product_id=data_product_id1)
        self.addCleanup(self.DP.suspend_data_product_persistence,
                        data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream,
                                             None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1,
                                              PRED.hasDataset, RT.Dataset,
                                              True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]
        #create the datastore at the beginning of each int test that persists data

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test')

        data_product_id2 = self.DP.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id,
                                      data_product_id=data_product_id2)

        self.DP.activate_data_product_persistence(
            data_product_id=data_product_id2)
        self.addCleanup(self.DP.suspend_data_product_persistence,
                        data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)

        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(
            instAgentInstance_id)
        gate = AgentProcessStateGate(self.PDC.read_process, instDevice_id,
                                     ProcessStateEnum.RUNNING)
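        # AgentProcessStateGate watches the process dispatcher until the agent
        # process tied to instDevice_id reaches RUNNING (or the timeout below expires)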
        self.assertTrue(
            gate.await(30),
            "The instrument agent instance (%s) did not spawn in 30 seconds" %
            gate.process_id)

        # take snapshot of config
        snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)

        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.restore_resource_state(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)

        if "BAD_DATA" == instance_obj.driver_config["comms_config"]:
            print "Saved config:"
            print snap_obj.content
            self.fail("Saved config was not properly restored")

        self.assertNotEqual("BAD_DATA",
                            instance_obj.driver_config["comms_config"])

        self.DP.delete_data_product(data_product_id1)
        self.DP.delete_data_product(data_product_id2)

    def test_agent_instance_config_hasDevice(self):
        def assign_fn(child_device_id, parent_device_id):
            self.RR2.create_association(parent_device_id, PRED.hasDevice,
                                        child_device_id)

        def find_fn(parent_device_id):
            ret, _ = self.RR.find_objects(subject=parent_device_id,
                                          predicate=PRED.hasDevice,
                                          id_only=True)
            return ret

        self.base_agent_instance_config(assign_fn, find_fn)
        log.info("END test_agent_instance_config_hasDevice")

    def test_agent_instance_config_hasNetworkParent(self):
        def assign_fn(child_device_id, parent_device_id):
            self.RR2.create_association(child_device_id, PRED.hasNetworkParent,
                                        parent_device_id)

        def find_fn(parent_device_id):
            ret, _ = self.RR.find_subjects(object=parent_device_id,
                                           predicate=PRED.hasNetworkParent,
                                           id_only=True)
            return ret

        self.base_agent_instance_config(assign_fn, find_fn)
        log.info("END test_agent_instance_config_hasNetworkParent")

    def base_agent_instance_config(
            self, assign_child_platform_to_parent_platform_fn,
            find_child_platform_ids_of_parent_platform_fn):
        """
        Verify that agent configurations are being built properly
        """
        clients = DotDict()
        clients.resource_registry = self.RR
        clients.pubsub_management = self.PSC
        clients.dataset_management = self.DSC
        config_builder = DotDict()
        config_builder.i = None
        config_builder.p = None
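        # config_builder is just a mutable holder; the refresh_*_hack helpers
        # below swap in fresh builder instances whenever associations change.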

        def refresh_pconfig_builder_hack(config_builder):
            """
            ugly hack to get around "idempotent" RR2 caching
            remove after https://github.com/ooici/coi-services/pull/1190
            """
            config_builder.p = PlatformAgentConfigurationBuilder(clients)

        def refresh_iconfig_builder_hack(config_builder):
            """
            ugly hack to get around "idempotent" RR2 caching
            remove after https://github.com/ooici/coi-services/pull/1190
            """
            config_builder.i = InstrumentAgentConfigurationBuilder(clients)

        org_obj = any_old(RT.Org)
        org_id = self.RR2.create(org_obj)

        inst_startup_config = {'startup': 'config'}

        generic_alerts_config = [{'lvl2': 'lvl3val'}]

        required_config_keys = [
            'org_governance_name', 'device_type', 'agent', 'driver_config',
            'stream_config', 'startup_config', 'aparam_alerts_config',
            'children'
        ]

        def verify_instrument_config(config, device_id):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name,
                             config['org_governance_name'])
            self.assertEqual(RT.InstrumentDevice, config['device_type'])
            self.assertIn('driver_config', config)
            driver_config = config['driver_config']
            expected_driver_fields = {
                'process_type': ('ZMQPyClassDriverLauncher', ),
            }
            for k, v in expected_driver_fields.iteritems():
                self.assertIn(k, driver_config)
                self.assertEqual(v, driver_config[k])

            self.assertIn('resource_id', config['agent'])
            self.assertEqual(device_id, config['agent']['resource_id'])
            self.assertEqual(inst_startup_config, config['startup_config'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config,
                             config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            for key in ['children']:
                self.assertEqual({}, config[key])

        # TODO(OOIION-1495) review the asserts below related to
        # requiring 'ports' to be present in the driver_config.
        # See the recent adjustment in agent_configuration_builder.py,
        # which was made to keep other tests from failing.
        # The asserts below would make the following tests fail:
        #  test_agent_instance_config_hasDevice
        #  test_agent_instance_config_hasNetworkParent

        def verify_child_config(config, device_id, inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name,
                             config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertIn('resource_id', config['agent'])
            self.assertEqual(device_id, config['agent']['resource_id'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config,
                             config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            self.assertIn('driver_config', config)
            self.assertIn('foo', config['driver_config'])
            """
            self.assertIn('ports', config['driver_config'])
            """
            self.assertEqual('bar', config['driver_config']['foo'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher', ),
                             config['driver_config']['process_type'])

            if None is inst_device_id:
                for key in ['children', 'startup_config']:
                    self.assertEqual({}, config[key])
            else:
                for key in ['startup_config']:
                    self.assertEqual({}, config[key])

                self.assertIn(inst_device_id, config['children'])
                verify_instrument_config(config['children'][inst_device_id],
                                         inst_device_id)
            """
            if config['driver_config']['ports']:
                self.assertTrue( isinstance(config['driver_config']['ports'], dict) )
            """

        def verify_parent_config(config,
                                 parent_device_id,
                                 child_device_id,
                                 inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name,
                             config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertIn('process_type', config['driver_config'])
            """
            self.assertIn('ports', config['driver_config'])
            """
            self.assertEqual(('ZMQPyClassDriverLauncher', ),
                             config['driver_config']['process_type'])
            self.assertIn('resource_id', config['agent'])
            self.assertEqual(parent_device_id, config['agent']['resource_id'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config,
                             config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            for key in ['startup_config']:
                self.assertEqual({}, config[key])
            """
            if config['driver_config']['ports']:
                self.assertTrue( isinstance(config['driver_config']['ports'], dict) )
            """
            self.assertIn(child_device_id, config['children'])
            verify_child_config(config['children'][child_device_id],
                                child_device_id, inst_device_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name(
            'ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(
            name='raw', parameter_dictionary_id=rpdict_id)

        #todo: create org and figure out which agent resource needs to get assigned to it

        def _make_platform_agent_structure(name='', agent_config=None):
            if None is agent_config: agent_config = {}

            # instance creation
            platform_agent_instance_obj = any_old(
                RT.PlatformAgentInstance, {
                    'driver_config': {
                        'foo': 'bar'
                    },
                    'alerts': generic_alerts_config
                })
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(
                platform_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(
                stream_name='raw',
                parameter_dictionary_name='ctd_raw_param_dict')
            platform_agent_obj = any_old(
                RT.PlatformAgent, {"stream_configurations": [raw_config]})
            platform_agent_id = self.IMS.create_platform_agent(
                platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(
                any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct)
            dp_id = self.DP.create_data_product(
                data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id,
                                          data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)
            self.addCleanup(self.DP.suspend_data_product_persistence, dp_id)

            #deployment creation
            site_obj = IonObject(RT.PlatformSite, name='sitePlatform')
            site_id = self.OMS.create_platform_site(platform_site=site_obj)

            # find current deployment using time constraints
            current_time = int(calendar.timegm(time.gmtime()))
            # two years on either side of current time
            start = current_time - 63115200
            end = current_time + 63115200
            temporal_bounds = IonObject(OT.TemporalBounds,
                                        name='planned',
                                        start_datetime=str(start),
                                        end_datetime=str(end))
            platform_port_obj = IonObject(
                OT.PlatformPort,
                reference_designator='GA01SUMO-FI003-09-CTDMO0999',
                port_type=PortTypeEnum.UPLINK,
                ip_address=0)
            deployment_obj = IonObject(
                RT.Deployment,
                name='TestPlatformDeployment_' + name,
                description='some new deployment',
                context=IonObject(OT.CabledNodeDeploymentContext),
                constraint_list=[temporal_bounds],
                port_assignments={platform_device_id: platform_port_obj})

            deploy_id = self.OMS.create_deployment(
                deployment=deployment_obj,
                site_id=site_id,
                device_id=platform_device_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(
                platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(
                platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(
                platform_agent_instance_id, org_id)

            return platform_agent_instance_id, platform_agent_id, platform_device_id

        def _make_instrument_agent_structure(agent_config=None):
            if None is agent_config: agent_config = {}

            # instance creation
            instrument_agent_instance_obj = any_old(
                RT.InstrumentAgentInstance, {
                    "startup_config": inst_startup_config,
                    'alerts': generic_alerts_config
                })
            instrument_agent_instance_obj.agent_config = agent_config
            instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(
                instrument_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(
                stream_name='raw',
                parameter_dictionary_name='ctd_raw_param_dict')
            instrument_agent_obj = any_old(
                RT.InstrumentAgent, {"stream_configurations": [raw_config]})
            instrument_agent_id = self.IMS.create_instrument_agent(
                instrument_agent_obj)

            # device creation
            instrument_device_id = self.IMS.create_instrument_device(
                any_old(RT.InstrumentDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct)
            dp_id = self.DP.create_data_product(
                data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(
                input_resource_id=instrument_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)
            self.addCleanup(self.DP.suspend_data_product_persistence, dp_id)

            #deployment creation
            site_obj = IonObject(RT.InstrumentSite, name='siteInstrument')
            site_id = self.OMS.create_instrument_site(instrument_site=site_obj)

            # find current deployment using time constraints
            current_time = int(calendar.timegm(time.gmtime()))
            # two years on either side of current time
            start = current_time - 63115200
            end = current_time + 63115200
            temporal_bounds = IonObject(OT.TemporalBounds,
                                        name='planned',
                                        start_datetime=str(start),
                                        end_datetime=str(end))
            platform_port_obj = IonObject(
                OT.PlatformPort,
                reference_designator='GA01SUMO-FI003-08-CTDMO0888',
                port_type=PortTypeEnum.PAYLOAD,
                ip_address=0)
            deployment_obj = IonObject(
                RT.Deployment,
                name='TestDeployment for Cabled Instrument',
                description='some new deployment',
                context=IonObject(OT.CabledInstrumentDeploymentContext),
                constraint_list=[temporal_bounds],
                port_assignments={instrument_device_id: platform_port_obj})

            deploy_id = self.OMS.create_deployment(
                deployment=deployment_obj,
                site_id=site_id,
                device_id=instrument_device_id)

            # assignments
            self.RR2.assign_instrument_agent_instance_to_instrument_device_with_has_agent_instance(
                instrument_agent_instance_id, instrument_device_id)
            self.RR2.assign_instrument_agent_to_instrument_agent_instance_with_has_agent_definition(
                instrument_agent_id, instrument_agent_instance_id)
            self.RR2.assign_instrument_device_to_org_with_has_resource(
                instrument_agent_instance_id, org_id)

            return instrument_agent_instance_id, instrument_agent_id, instrument_device_id

        # can't do anything without an agent instance obj
        log.debug(
            "Testing that preparing a launcher without agent instance raises an error"
        )
        # associations have changed since builder was instantiated
        refresh_pconfig_builder_hack(config_builder)
        self.assertRaises(AssertionError,
                          config_builder.p.prepare,
                          will_launch=False)

        log.debug(
            "Making the structure for a platform agent, which will be the child"
        )
        platform_agent_instance_child_id, _, platform_device_child_id = _make_platform_agent_structure(
            name='child')
        platform_agent_instance_child_obj = self.RR2.read(
            platform_agent_instance_child_id)

        log.debug("Preparing a valid agent instance launch, for config only")
        # associations have changed since builder was instantiated
        refresh_pconfig_builder_hack(config_builder)
        config_builder.p.set_agent_instance_object(
            platform_agent_instance_child_obj)
        child_config = config_builder.p.prepare(will_launch=False)
        verify_child_config(child_config, platform_device_child_id)

        log.debug(
            "Making the structure for a platform agent, which will be the parent"
        )
        platform_agent_instance_parent_id, _, platform_device_parent_id = _make_platform_agent_structure(
            name='parent')
        platform_agent_instance_parent_obj = self.RR2.read(
            platform_agent_instance_parent_id)

        log.debug("Testing child-less parent as a child config")
        # associations have changed since builder was instantiated
        refresh_pconfig_builder_hack(config_builder)
        config_builder.p.set_agent_instance_object(
            platform_agent_instance_parent_obj)
        parent_config = config_builder.p.prepare(will_launch=False)
        verify_child_config(parent_config, platform_device_parent_id)

        log.debug("assigning child platform to parent")
        assign_child_platform_to_parent_platform_fn(platform_device_child_id,
                                                    platform_device_parent_id)

        child_device_ids = find_child_platform_ids_of_parent_platform_fn(
            platform_device_parent_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing parent + child as parent config")
        # associations have changed since builder was instantiated
        refresh_pconfig_builder_hack(config_builder)
        config_builder.p.set_agent_instance_object(
            platform_agent_instance_parent_obj)
        parent_config = config_builder.p.prepare(will_launch=False)
        verify_parent_config(parent_config, platform_device_parent_id,
                             platform_device_child_id)

        log.debug("making the structure for an instrument agent")
        instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure(
        )
        instrument_agent_instance_obj = self.RR2.read(
            instrument_agent_instance_id)

        log.debug("Testing instrument config")
        # associations have changed since builder was instantiated
        refresh_iconfig_builder_hack(config_builder)
        config_builder.i.set_agent_instance_object(
            instrument_agent_instance_obj)
        instrument_config = config_builder.i.prepare(will_launch=False)
        verify_instrument_config(instrument_config, instrument_device_id)

        log.debug("assigning instrument to platform")
        self.RR2.assign_instrument_device_to_platform_device_with_has_device(
            instrument_device_id, platform_device_child_id)

        child_device_ids = self.RR2.find_instrument_device_ids_of_platform_device_using_has_device(
            platform_device_child_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing entire config")
        # associations have changed since builder was instantiated
        refresh_pconfig_builder_hack(config_builder)
        config_builder.p.set_agent_instance_object(
            platform_agent_instance_parent_obj)
        full_config = config_builder.p.prepare(will_launch=False)
        verify_parent_config(full_config, platform_device_parent_id,
                             platform_device_child_id, instrument_device_id)

        #self.fail(parent_config)
        #plauncher.prepare(will_launch=False)
        log.info("END base_agent_instance_config")
Example n. 13
class TestResourceRegistry(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient(
            node=self.container.node)

    def test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(
            cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(
            cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object not based on most current version"))

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(
            inst, headers={'ion-actor-id': str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid,
                                                             PRED.hasOwner,
                                                             RT.ActorIdentity,
                                                             id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def test_lifecycle(self):
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT_PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.PLAN)
        self.assertEquals(new_state, LCS.PLANNED_PRIVATE)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED_PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(
                rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.DEVELOP)
        self.assertEquals(new_state, LCS.DEVELOPED_PRIVATE)

        self.assertRaises(
            iex.BadRequest,
            self.resource_registry_service.execute_lifecycle_transition,
            resource_id=rid,
            transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(
            rid, LCS.INTEGRATED_PRIVATE)
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED_PRIVATE)
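
    # Hedged illustrative sketch (not part of the original test): a minimal
    # transition table mirroring the DRAFT -> PLANNED -> DEVELOPED path driven
    # above via LCE events. The table and lower-case event names are assumptions
    # for illustration only, not the registry's actual lifecycle implementation.
    def _illustrate_lifecycle_transitions(self):
        transitions = {
            ('DRAFT_PRIVATE', 'plan'): 'PLANNED_PRIVATE',
            ('PLANNED_PRIVATE', 'develop'): 'DEVELOPED_PRIVATE',
        }
        state = 'DRAFT_PRIVATE'
        for event in ('plan', 'develop'):
            # an unknown (state, event) pair would raise KeyError, which is the
            # analogue of the BadRequest raised above for unsupported events
            state = transitions[(state, event)]
        assert state == 'DEVELOPED_PRIVATE'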

    def test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(
            actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(
            actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(
            user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(
            user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj_id,
                'bogustype')
        self.assertTrue(
            cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                "bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Subject id or rev not available")

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id or rev not available")

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message ==
                        "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(
            cm.exception.message ==
            "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Create a similar association to a specific revision
        # TODO: This is not a supported case so far
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R")

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None,
            True)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(
            RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(
            RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Delete one of the associations
        self.resource_registry_service.delete_association(assoc_id2)

        assoc = self.resource_registry_service.get_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)
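
    # Hedged illustrative sketch (not part of the original test): associations
    # in the registry behave like (subject, predicate, object) triples; this
    # shows the filtering pattern that find_associations/find_objects rely on.
    # The triple data here is purely illustrative, not the storage format.
    def _illustrate_association_triples(self):
        triples = [('actor_1', 'hasInfo', 'info_1'),
                   ('actor_1', 'hasInfo', 'info_2')]
        # "find objects": keep objects whose subject and predicate match
        objects = [o for s, p, o in triples
                   if s == 'actor_1' and p == 'hasInfo']
        assert objects == ['info_1', 'info_2']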

    def test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(
                RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(
            cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(
            RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("InstrumentDevice", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentDevice, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentDevice, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

    def test_attach(self):

        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Create a resource to attach to
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1)
        self.assertEquals(binary, att1.content)

        import base64
        att = Attachment(content=base64.encodestring(binary),
                         attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid,
                                                               id_only=False,
                                                               limit=1)
        self.assertEquals(atts[0].content, att1.content)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [])
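
    # Hedged illustrative sketch (not part of the original test): the ASCII
    # attachment path above stores base64 text, so binary content survives a
    # round-trip through encode/decode. Uses only the standard library; the
    # helper name is illustrative.
    def _illustrate_base64_roundtrip(self):
        import base64
        raw = "\x89PNG\r\n\x1a\n"            # any binary payload
        encoded = base64.b64encode(raw)      # what an ASCII attachment would carry
        assert base64.b64decode(encoded) == raw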

    def test_read_mult(self):
        test_resource1_id, _ = self.resource_registry_service.create(
            Resource(name='test1'))
        test_resource2_id, _ = self.resource_registry_service.create(
            Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o, Resource)
            self.assertTrue(o._id in res_list)

    def test_find_associations_mult(self):
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(
            subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(
            subject=transform_id,
            object=pd_id,
            predicate=PRED.hasProcessDefinition)

        results, _ = self.resource_registry_service.find_associations_mult(
            subjects=[dp_id], id_only=True)
        self.assertTrue(results == [transform_id])

        results, _ = self.resource_registry_service.find_associations_mult(
            subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
class TestTransformWorker(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # Instantiate a process to represent the test
        process = TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(
            node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(
            node=self.container.node, process=process)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client,
                                  self.addCleanup)

        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)

    def push_granule(self, data_product_id):
        '''
        Publishes a granule to the data product and verifies that it arrived in the dataset
        '''
        datasets, _ = self.rrclient.find_objects(data_product_id,
                                                 PRED.hasDataset,
                                                 id_only=True)
        dataset_monitor = DatasetMonitor(datasets[0])

        rdt = self.ph.rdt_for_data_product(data_product_id)
        self.ph.fill_parsed_rdt(rdt)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)

        assert dataset_monitor.wait()
        dataset_monitor.stop()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_transform_worker(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.dp_list = []
        self.data_process_objs = []
        self._output_stream_ids = []
        self.granule_verified = Event()
        self.worker_assigned_event_verified = Event()
        self.dp_created_event_verified = Event()
        self.heartbeat_event_verified = Event()

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition,
                        self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        self.start_event_listener()

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process(
        )
        self.dp_list.append(dataprocess_id)

        # validate that the repository for data product algorithms persists the new resources  NEW SA-1
        # create_data_process call created one of each
        dpd_ids, _ = self.rrclient.find_resources(
            restype=OT.DataProcessDefinition, id_only=False)
        # there will be more than one because of the DPDs that represent the PFs in the data product above
        self.assertTrue(dpd_ids is not None)
        dp_ids, _ = self.rrclient.find_resources(restype=OT.DataProcess,
                                                 id_only=False)
        # only one DP because the PFs that are in the data product above are not activated yet.
        self.assertEquals(len(dp_ids), 1)

        # validate the name and version label  NEW SA - 2
        dataprocessdef_obj = self.dataprocessclient.read_data_process_definition(
            dataprocessdef_id)
        self.assertEqual(dataprocessdef_obj.version_label, '1.0a')
        self.assertEqual(dataprocessdef_obj.name, 'add_arrays')

        # validate that the DPD has an attachment  NEW SA - 21
        attachment_ids, assoc_ids = self.rrclient.find_objects(
            dataprocessdef_id, PRED.hasAttachment, RT.Attachment, True)
        self.assertEqual(len(attachment_ids), 1)
        attachment_obj = self.rrclient.read_attachment(attachment_ids[0])
        log.debug('attachment: %s', attachment_obj)

        # validate that the data process resource has input and output data products associated
        # L4-CI-SA-RQ-364  and NEW SA-3
        outproduct_ids, assoc_ids = self.rrclient.find_objects(
            dataprocess_id, PRED.hasOutputProduct, RT.DataProduct, True)
        self.assertEqual(len(outproduct_ids), 1)
        inproduct_ids, assoc_ids = self.rrclient.find_objects(
            dataprocess_id, PRED.hasInputProduct, RT.DataProduct, True)
        self.assertEqual(len(inproduct_ids), 1)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(
            output_data_product_id[0])

        # Do a basic check that there are 2 entries in the provenance graph: the parent and child data products,
        # with the DataProcessDefinition recorded as creating the child from the parent.
        self.assertTrue(len(output_data_product_provenance) == 2)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[
            output_data_product_id[0]]['parents'])
        self.assertTrue(output_data_product_provenance[
            output_data_product_id[0]]['parents'][self.input_dp_id]
                        ['data_process_definition_id'] == dataprocessdef_id)

        # NEW SA - 4 | Data processing shall include the appropriate data product algorithm name and version number in
        # the metadata of each output data product created by the data product algorithm.
        output_data_product_obj, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=False)
        self.assertTrue(output_data_product_obj[0].name != None)
        self.assertTrue(output_data_product_obj[0]._rev != None)

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            predicate=PRED.hasSubscription,
            object_type=RT.Subscription,
            id_only=False)
        log.debug('test_transform_worker subscription_obj:  %s',
                  subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(
            name='parsed_subscription',
            stream_ids=[self.stream_id],
            exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription,
                        self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription,
                        self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id,
                                                   stream_route=stream_route)

        for n in range(1, 101):
            rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
            rdt['time'] = [0]  # time should always come first
            rdt['conductivity'] = [1]
            rdt['pressure'] = [2]
            rdt['salinity'] = [8]

            self.publisher.publish(rdt.to_granule())

        # validate that the output granule is received and the updated value is correct
        self.assertTrue(self.granule_verified.wait(self.wait_time))

        # validate that the data process loaded into worker event is received    (L4-CI-SA-RQ-182)
        self.assertTrue(
            self.worker_assigned_event_verified.wait(self.wait_time))

        # validate that the data process create (with data product ids) event is received    (NEW SA -42)
        self.assertTrue(self.dp_created_event_verified.wait(self.wait_time))

        # validate that the data process heartbeat event is received (for every hundred granules processed) (L4-CI-SA-RQ-182)
        # this takes a while, so set the wait limit to a large value
        self.assertTrue(self.heartbeat_event_verified.wait(200))

        # validate that the code from the transform function can be retrieved via inspect_data_process_definition
        src = self.dataprocessclient.inspect_data_process_definition(
            dataprocessdef_id)
        self.assertIn('def add_arrays(a, b)', src)

        # now delete the DPD and DP, then verify that the resources are retired so that the information required for provenance is still available
        self.dataprocessclient.delete_data_process(dataprocess_id)
        self.dataprocessclient.delete_data_process_definition(
            dataprocessdef_id)

        in_dp_objs, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            predicate=PRED.hasInputProduct,
            object_type=RT.DataProduct,
            id_only=True)
        self.assertTrue(in_dp_objs is not None)

        dpd_objs, _ = self.rrclient.find_subjects(
            subject_type=RT.DataProcessDefinition,
            predicate=PRED.hasDataProcess,
            object=dataprocess_id,
            id_only=True)
        self.assertTrue(dpd_objs is not None)
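
    # Hedged illustrative sketch (not part of the original test): walks a
    # provenance mapping shaped like the one asserted on above, i.e.
    # {product_id: {'parents': {parent_id: {'data_process_definition_id': ...}}}}.
    # The shape is inferred from those assertions, not from the service code.
    def _illustrate_provenance_walk(self, provenance, product_id):
        parents = provenance.get(product_id, {}).get('parents', {})
        for parent_id, info in parents.items():
            # yields (parent resource id, DPD that derived the child from it)
            yield parent_id, info.get('data_process_definition_id')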

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_instrumentdevice(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes.
        instDevice_obj, _ = self.rrclient.find_resources(
            restype=RT.InstrumentDevice, name='test_ctd_device')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice,
                                       name='test_ctd_device',
                                       description="test_ctd_device",
                                       serial_number="12345")
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process(
        )

        self.addCleanup(self.dataprocessclient.delete_data_process,
                        dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition,
                        dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(
            output_data_product_id[0])

        # Do a basic check that there are 3 entries in the provenance graph: the child and parent data products plus
        # the instrument device that feeds the parent.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[
            output_data_product_id[0]]['parents'])
        self.assertTrue(instDevice_id in output_data_product_provenance[
            self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[instDevice_id]['type']
                        == 'InstrumentDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_platformdevice(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes.
        platform_device_obj, _ = self.rrclient.find_resources(
            restype=RT.PlatformDevice, name='TestPlatform')
        if platform_device_obj:
            platform_device_id = platform_device_obj[0]._id
        else:
            platform_device_obj = IonObject(RT.PlatformDevice,
                                            name='TestPlatform',
                                            description="TestPlatform",
                                            serial_number="12345")
            platform_device_id = self.imsclient.create_platform_device(
                platform_device=platform_device_obj)

        self.damsclient.assign_data_product(
            input_resource_id=platform_device_id,
            data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process(
        )
        self.addCleanup(self.dataprocessclient.delete_data_process,
                        dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition,
                        dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id, _ = self.rrclient.find_objects(
            subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(
            output_data_product_id[0])

        # Do a basic check that there are 3 entries in the provenance graph: the child and parent data products plus
        # the platform device that feeds the parent.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[
            output_data_product_id[0]]['parents'])
        self.assertTrue(platform_device_id in output_data_product_provenance[
            self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[platform_device_id]
                        ['type'] == 'PlatformDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_event_transform_worker(self):
        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # test that a data process (type: data-product-in / event-out) can be defined and launched.
        # verify that event fields are correctly populated

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition,
                        self.stream_def_id)

        # create the DataProduct
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        # create the DPD and two DPs
        self.event_data_process_id = self.create_event_data_processes()

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(
            subject=self.event_data_process_id,
            predicate=PRED.hasSubscription,
            object_type=RT.Subscription,
            id_only=False)
        log.debug('test_event_transform_worker subscription_obj:  %s',
                  subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(
            name='parsed_subscription',
            stream_ids=[self.stream_id],
            exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription,
                        self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription,
                        self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id,
                                                   stream_route=stream_route)

        self.start_event_transform_listener()

        self.data_modified = Event()

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time'] = [0]  # time should always come first
        rdt['conductivity'] = [1]
        rdt['pressure'] = [2]
        rdt['salinity'] = [8]

        self.publisher.publish(rdt.to_granule())

        self.assertTrue(self.event_verified.wait(self.wait_time))

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_bad_argument_map(self):
        self._output_stream_ids = []

        # test that a data process (type: data-product-in / data-product-out) parameter mapping is validated during
        # data process creation and that the correct exception is raised for both input and output.

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition,
                        self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(
            data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # two data processes using one transform and one DPD

        dp1_func_output_dp_id = self.create_output_data_product()

        # Set up DPD and DP #2 - array add function
        tf_obj = IonObject(
            RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
            uri=
            'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS)
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(
            data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            self.stream_def_id, add_array_dpd_id, binding='add_array_func')

        # create the data process with invalid argument map
        argument_map = {"arr1": "foo", "arr2": "bar"}
        output_param = "salinity"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=add_array_dpd_id,
                inputs=[self.input_dp_id],
                outputs=[dp1_func_output_dp_id],
                argument_map=argument_map,
                out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(
            ex.message,
            "Input data product does not contain the parameters defined in argument map"
        )

        # create the data process with invalid output parameter name
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "foo"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(
                data_process_definition_id=add_array_dpd_id,
                inputs=[self.input_dp_id],
                outputs=[dp1_func_output_dp_id],
                argument_map=argument_map,
                out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(
            ex.message,
            "Output data product does not contain the output parameter name provided"
        )
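
    # Hedged illustrative sketch (not part of the original test): the kind of
    # validation exercised above -- every parameter named in the argument map
    # must exist in the input data product, and the output parameter must exist
    # in the output product. Names and return shape are illustrative only.
    def _illustrate_argument_map_check(self, argument_map, input_params,
                                       output_params, out_param_name):
        missing_inputs = [p for p in argument_map.values()
                          if p not in input_params]
        missing_output = out_param_name not in output_params
        return missing_inputs, missing_output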

    def create_event_data_processes(self):

        # two data processes using one transform and one DPD
        argument_map = {"a": "salinity"}

        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(
            RT.TransformFunction,
            name='validate_salinity_array',
            description='validate_salinity_array',
            function='validate_salinity_array',
            module="ion.processes.data.transforms.test.test_transform_worker",
            arguments=['a'],
            function_type=TransformFunctionType.TRANSFORM)

        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='validate_salinity_array',
            description='validate_salinity_array',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
        )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(
            data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            self.stream_def_id,
            add_array_dpd_id,
            binding='validate_salinity_array')

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition_id=add_array_dpd_id,
            inputs=[self.input_dp_id],
            outputs=None,
            argument_map=argument_map)
        self.damsclient.register_process(dp1_data_process_id)
        self.addCleanup(self.dataprocessclient.delete_data_process,
                        dp1_data_process_id)

        return dp1_data_process_id

    def create_data_process(self):

        # two data processes using one transform and one DPD

        dp1_func_output_dp_id = self.create_output_data_product()
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "salinity"

        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(
            RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
            uri=
            'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
            version_label='1.0a')
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(
            data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            self.stream_def_id, add_array_dpd_id, binding='add_array_func')

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition_id=add_array_dpd_id,
            inputs=[self.input_dp_id],
            outputs=[dp1_func_output_dp_id],
            argument_map=argument_map,
            out_param_name=output_param)
        self.damsclient.register_process(dp1_data_process_id)
        #self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        # add an attachment object to this DPD to test new SA-21
        import msgpack
        attachment_content = 'foo bar'
        attachment_obj = IonObject(RT.Attachment,
                                   name='test_attachment',
                                   attachment_type=AttachmentType.ASCII,
                                   content_type='text/plain',
                                   content=msgpack.packb(attachment_content))
        att_id = self.rrclient.create_attachment(add_array_dpd_id,
                                                 attachment_obj)
        self.addCleanup(self.rrclient.delete_attachment, att_id)

        return add_array_dpd_id, dp1_data_process_id, dp1_func_output_dp_id

    def create_output_data_product(self):
        dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition(
            name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id)

        dp1_output_dp_obj = IonObject(RT.DataProduct,
                                      name='data_process1_data_product',
                                      description='output of add array func')

        dp1_func_output_dp_id = self.dataproductclient.create_data_product(
            dp1_output_dp_obj, dp1_outgoing_stream_id)
        self.addCleanup(self.dataproductclient.delete_data_product,
                        dp1_func_output_dp_id)
        # retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id,
                                                   PRED.hasStream, None, True)
        self._output_stream_ids.append(stream_ids[0])

        subscription_id = self.pubsub_client.create_subscription(
            'validator', data_product_ids=[dp1_func_output_dp_id])
        self.addCleanup(self.pubsub_client.delete_subscription,
                        subscription_id)

        def on_granule(msg, route, stream_id):
            log.debug('recv_packet stream_id: %s route: %s   msg: %s',
                      stream_id, route, msg)
            self.validate_output_granule(msg, route, stream_id)
            self.granule_verified.set()

        validator = StandaloneStreamSubscriber('validator',
                                               callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_client.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription,
                        subscription_id)

        return dp1_func_output_dp_id

    def validate_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DataProcessStatusEvent.
        """
        data_process_event = args[0]
        log.debug("DataProcessStatusEvent: %s",
                  str(data_process_event.__dict__))

        # if data process already created, check origin
        if self.dp_list:
            self.assertIn(data_process_event.origin, self.dp_list)

            # if this is a heartbeat event then 100 granules have been processed
            if 'data process status update.' in data_process_event.description:
                self.heartbeat_event_verified.set()

        else:
            # otherwise check whether this is the assign or the create event

            if 'Data process assigned to transform worker' in data_process_event.description:
                self.worker_assigned_event_verified.set()
            elif 'Data process created for data product' in data_process_event.description:
                self.dp_created_event_verified.set()

    def validate_output_granule(self, msg, route, stream_id):
        self.assertIn(stream_id, self._output_stream_ids)

        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.debug('validate_output_granule  rdt: %s', rdt)
        sal_val = rdt['salinity']
        np.testing.assert_array_equal(sal_val, np.array([3]))

    def start_event_listener(self):

        es = EventSubscriber(event_type=OT.DataProcessStatusEvent,
                             callback=self.validate_event)
        es.start()

        self.addCleanup(es.stop)

    def validate_transform_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DeviceStatusAlertEvent.
        """
        status_alert_event = args[0]

        np.testing.assert_array_equal(status_alert_event.origin,
                                      self.stream_id)
        np.testing.assert_array_equal(status_alert_event.values,
                                      np.array([self.event_data_process_id]))
        log.debug("DeviceStatusAlertEvent: %s",
                  str(status_alert_event.__dict__))
        self.event_verified.set()

    def start_event_transform_listener(self):
        es = EventSubscriber(event_type=OT.DeviceStatusAlertEvent,
                             callback=self.validate_transform_event)
        es.start()

        self.addCleanup(es.stop)

    def test_download(self):
        egg_url = 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        egg_path = TransformWorker.download_egg(egg_url)

        import pkg_resources
        pkg_resources.working_set.add_entry(egg_path)
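        # register the downloaded egg with pkg_resources so that ion_example.add_arrays can be imported below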

        from ion_example.add_arrays import add_arrays

        a = add_arrays(1, 2)
        self.assertEquals(a, 3)

class TestResourceRegistry(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient()

    @unittest.skip('Represents a bug in storage/retrieval')
    def test_tuple_in_dict(self):
        # create a resource with a tuple saved in a dict
        transform_obj = IonObject(RT.Transform)
        transform_obj.configuration = {}
        transform_obj.configuration["tuple"] = ('STRING',)
        transform_id, _ = self.resource_registry_service.create(transform_obj)

        # read the resource back
        returned_transform_obj = self.resource_registry_service.read(transform_id)

        self.assertEqual(transform_obj.configuration["tuple"], returned_transform_obj.configuration["tuple"])

    def test_basics(self):
        # Sequence all the tests so that we can save numerous system start and stops
        self._do_test_crud()
        self._do_test_read_mult()
        self._do_test_lifecycle()
        self._do_test_attach()
        self._do_test_association()
        self._do_test_find_resources()
        self._do_test_find_objects_mult()

    def _do_test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")
        
        # Can't set attributes that aren't defined in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)
        
        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid,_ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid,_ = self.resource_registry_service.create(inst, headers={'ion-actor-id':str(uid)})
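        # passing the ion-actor-id header is expected to auto-create a hasOwner association
        # from the new instrument to the actor, which is verified below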

        ids,_ = self.resource_registry_service.find_objects(iid, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def _do_test_read_mult(self):
        test_resource1_id,_  = self.resource_registry_service.create(Resource(name='test1'))
        test_resource2_id,_  = self.resource_registry_service.create(Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o,Resource)
            self.assertTrue(o._id in res_list)

    def _do_test_lifecycle(self):
        # Lifecycle tests
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid,rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT)
        self.assertEquals(att1.availability, AS.PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN)
        self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE))

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)
        self.assertEquals(att2.availability, AS.PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE)
        self.assertTrue("type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce" in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP)
        self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE))

        with self.assertRaises(BadRequest):
            self.resource_registry_service.execute_lifecycle_transition(
                    resource_id=rid, transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(rid, lcstate(LCS.INTEGRATED, AS.PRIVATE))
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED)
        self.assertEquals(att1.availability, AS.PRIVATE)

    def _do_test_attach(self):
        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"
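        # small binary PNG payload used to exercise attachments: stored raw as a BLOB
        # attachment first, then base64-encoded for the ASCII attachment variant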

        # Owner creation tests
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid,_ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1, include_content=True)
        self.assertEquals(binary, att1.content)

        import base64
        att = Attachment(content=base64.encodestring(binary), attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2, include_content=True)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid, id_only=False, include_content=True, limit=1)
        self.assertEquals(atts[0].content, binary)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [])

    def _do_test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message.startswith("Subject id"))

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message.startswith("Object id"))

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(cm.exception.message == "Illegal object type ActorIdentity for predicate hasInfo")

        # Create an association between the actor identity (subject) and the user info (object)
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None, True)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)


        assoc = self.resource_registry_service.get_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def _do_test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")
        
        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertEquals(len(ret[0]), 0)

        # Instantiate an object
        obj = IonObject("InstrumentAgentInstance", name="name")
        
        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, None, "name", False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

    def _do_test_find_objects_mult(self):
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)
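        # association chain: DataProcess --hasTransform--> Transform --hasProcessDefinition--> ProcessDefinition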

        results, _  = self.resource_registry_service.find_objects_mult(subjects=[dp_id],id_only=True)
        self.assertTrue(results == [transform_id])

        results, _  = self.resource_registry_service.find_objects_mult(subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @attr('EXT')
    def test_get_resource_extension(self):


        #Testing multiple instrument owners
        subject1 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        actor_identity_obj1 = IonObject(RT.ActorIdentity, {"name": subject1})
        actor_id1,_ = self.resource_registry_service.create(actor_identity_obj1)

        user_info_obj1 = IonObject(RT.UserInfo, {"name": "Foo"})
        user_info_id1,_ = self.resource_registry_service.create(user_info_obj1)
        self.resource_registry_service.create_association(actor_id1, PRED.hasInfo, user_info_id1)

        subject2 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256"

        actor_identity_obj2 = IonObject(RT.ActorIdentity, {"name": subject2})
        actor_id2,_ = self.resource_registry_service.create(actor_identity_obj2)

        user_info_obj2 = IonObject(RT.UserInfo, {"name": "Foo2"})
        user_info_id2,_ = self.resource_registry_service.create(user_info_obj2)
        self.resource_registry_service.create_association(actor_id2, PRED.hasInfo, user_info_id2)

        test_obj = IonObject(RT.InformationResource,  {"name": "TestResource"})
        test_obj_id,_ = self.resource_registry_service.create(test_obj)
        self.resource_registry_service.create_association(test_obj_id, PRED.hasOwner, actor_id1)
        self.resource_registry_service.create_association(test_obj_id, PRED.hasOwner, actor_id2)
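        # TestResource now has two owners; the extended resource below should report both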

        extended_resource = self.resource_registry_service.get_resource_extension(test_obj_id, OT.ExtendedInformationResource )

        self.assertEqual(test_obj_id,extended_resource._id)
        self.assertEqual(len(extended_resource.owners),2)

        extended_resource_list = self.resource_registry_service.get_resource_extension(str([user_info_id1,user_info_id2]), OT.ExtendedInformationResource)
        self.assertEqual(len(extended_resource_list), 2)

        optional_args = {'user_id': user_info_id1}
        extended_resource = self.resource_registry_service.get_resource_extension(test_obj_id, OT.TestExtendedInformationResource, optional_args=optional_args )

        self.assertEqual(test_obj_id,extended_resource._id)
        self.assertEqual(len(extended_resource.owners),2)
        self.assertEqual(extended_resource.user_id, user_info_id1)

class TestTransformWorker(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # Instantiate a process to represent the test
        process=TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process = process)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)

        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)

    def push_granule(self, data_product_id):
        '''
        Publishes a granule and verifies that it arrived in the dataset
        '''
        datasets, _ = self.rrclient.find_objects(data_product_id, PRED.hasDataset, id_only=True)
        dataset_monitor = DatasetMonitor(datasets[0])
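        # the monitor's wait() below blocks until the dataset behind the data product reports new data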

        rdt = self.ph.rdt_for_data_product(data_product_id)
        self.ph.fill_parsed_rdt(rdt)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)

        assert dataset_monitor.wait()
        dataset_monitor.stop()



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.dp_list = []
        self.data_process_objs = []
        self._output_stream_ids = []
        self.granule_verified = Event()
        self.worker_assigned_event_verified = Event()
        self.dp_created_event_verified = Event()
        self.heartbeat_event_verified = Event()

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
                                             temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]
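        # keep the stream id so test granules can be published directly onto this stream later in the test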

        self.start_event_listener()

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.dp_list.append(dataprocess_id)

        # validate that the repository for data product algorithms persists the new resources  NEW SA-1
        # create_data_process call created one of each
        dpd_ids, _ = self.rrclient.find_resources(restype=OT.DataProcessDefinition, id_only=False)
        # there will be more than one because of the DPDs that represent the PFs in the data product above
        self.assertTrue(dpd_ids is not None)
        dp_ids, _ = self.rrclient.find_resources(restype=OT.DataProcess, id_only=False)
        # only one DP because the PFs in the data product above are not activated yet.
        self.assertEquals(len(dp_ids), 1)


        # validate the name and version label  NEW SA - 2
        dataprocessdef_obj = self.dataprocessclient.read_data_process_definition(dataprocessdef_id)
        self.assertEqual(dataprocessdef_obj.version_label, '1.0a')
        self.assertEqual(dataprocessdef_obj.name, 'add_arrays')

        # validate that the DPD has an attachment  NEW SA - 21
        attachment_ids, assoc_ids = self.rrclient.find_objects(dataprocessdef_id, PRED.hasAttachment, RT.Attachment, True)
        self.assertEqual(len(attachment_ids), 1)
        attachment_obj = self.rrclient.read_attachment(attachment_ids[0])
        log.debug('attachment: %s', attachment_obj)

        # validate that the data process resource has input and output data products associated
        # L4-CI-SA-RQ-364  and NEW SA-3
        outproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasOutputProduct, RT.DataProduct, True)
        self.assertEqual(len(outproduct_ids), 1)
        inproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasInputProduct, RT.DataProduct, True)
        self.assertEqual(len(inproduct_ids), 1)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check that there are 2 entries in the provenance graph: the child (output)
        # data product and its parent (input) data product.
        self.assertTrue(len(output_data_product_provenance) == 2)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(output_data_product_provenance[output_data_product_id[0]]['parents'][self.input_dp_id]['data_process_definition_id'] == dataprocessdef_id)


        # NEW SA - 4 | Data processing shall include the appropriate data product algorithm name and version number in
        # the metadata of each output data product created by the data product algorithm.
        output_data_product_obj,_ = self.rrclient.find_objects(subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=False)
        self.assertTrue(output_data_product_obj[0].name != None)
        self.assertTrue(output_data_product_obj[0]._rev != None)

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_transform_worker subscription_obj:  %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route )


        for n in range(1, 101):
            rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
            rdt['time']         = [0] # time should always come first
            rdt['conductivity'] = [1]
            rdt['pressure']     = [2]
            rdt['salinity']     = [8]

            self.publisher.publish(rdt.to_granule())

        # validate that the output granule is received and the updated value is correct
        self.assertTrue(self.granule_verified.wait(self.wait_time))


        # validate that the 'data process assigned to transform worker' event is received    (L4-CI-SA-RQ-182)
        self.assertTrue(self.worker_assigned_event_verified.wait(self.wait_time))

        # validate that the 'data process created' event (with data product ids) is received    (NEW SA-42)
        self.assertTrue(self.dp_created_event_verified.wait(self.wait_time))

        # validate that the data process heartbeat event is received (for every hundred granules processed) (L4-CI-SA-RQ-182)
        # this takes a while, so set the wait limit to a large value
        self.assertTrue(self.heartbeat_event_verified.wait(200))

        # validate that the code from the transform function can be retrieved via inspect_data_process_definition
        src = self.dataprocessclient.inspect_data_process_definition(dataprocessdef_id)
        self.assertIn( 'def add_arrays(a, b)', src)

        # now delete the DPD and DP, then verify that the resources are retired so that the information required for provenance is still available
        self.dataprocessclient.delete_data_process(dataprocess_id)
        self.dataprocessclient.delete_data_process_definition(dataprocessdef_id)

        in_dp_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasInputProduct, object_type=RT.DataProduct, id_only=True)
        self.assertTrue(in_dp_objs is not None)

        dpd_objs, _ = self.rrclient.find_subjects(subject_type=RT.DataProcessDefinition, predicate=PRED.hasDataProcess, object=dataprocess_id, id_only=True)
        self.assertTrue(dpd_objs is not None)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_instrumentdevice(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
            temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes.
        instDevice_obj,_ = self.rrclient.find_resources(restype=RT.InstrumentDevice, name='test_ctd_device')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice, name='test_ctd_device', description="test_ctd_device", serial_number="12345" )
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()

        self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check that there are 3 entries in the provenance graph: the child (output)
        # data product, its parent (input) data product, and the instrument device.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(instDevice_id in output_data_product_provenance[self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[instDevice_id]['type'] == 'InstrumentDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_platformdevice(self):

        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated

        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
            temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes.
        platform_device_obj,_ = self.rrclient.find_resources(restype=RT.PlatformDevice, name='TestPlatform')
        if platform_device_obj:
            platform_device_id = platform_device_obj[0]._id
        else:
            platform_device_obj = IonObject(RT.PlatformDevice, name='TestPlatform', description="TestPlatform", serial_number="12345" )
            platform_device_id = self.imsclient.create_platform_device(platform_device=platform_device_obj)

        self.damsclient.assign_data_product(input_resource_id=platform_device_id, data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id,
            object_type=RT.DataProduct,
            predicate=PRED.hasOutputProduct,
            id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check that there are 3 entries in the provenance graph: the child (output)
        # data product, its parent (input) data product, and the platform device.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(platform_device_id in output_data_product_provenance[self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[platform_device_id]['type'] == 'PlatformDevice')


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_event_transform_worker(self):
        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()


        # test that a data process (type: data-product-in / event-out) can be defined and launched.
        # verify that event fields are correctly populated


        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
                                             temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        # create the DPD and two DPs
        self.event_data_process_id = self.create_event_data_processes()

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=self.event_data_process_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_event_transform_worker subscription_obj:  %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route )

        self.start_event_transform_listener()

        self.data_modified = Event()

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time']         = [0] # time should always come first
        rdt['conductivity'] = [1]
        rdt['pressure']     = [2]
        rdt['salinity']     = [8]

        self.publisher.publish(rdt.to_granule())

        self.assertTrue(self.event_verified.wait(self.wait_time))



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_bad_argument_map(self):
        self._output_stream_ids = []

        # test that a data process (type: data-product-in / data-product-out) parameter mapping is validated during
        # data process creation and that the correct exception is raised for both input and output.

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(  RT.DataProduct, name='input_data_product', description='input test stream',
                                             temporal_domain = self.time_dom.dump(),  spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,  stream_definition_id=self.stream_def_id)

        # two data processes using one transform and one DPD

        dp1_func_output_dp_id =  self.create_output_data_product()


        # Set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
            uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
            )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS
            )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func' )

        # create the data process with invalid argument map
        argument_map = {"arr1": "foo", "arr2": "bar"}
        output_param = "salinity"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                                                                                 outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(ex.message, "Input data product does not contain the parameters defined in argument map")

        # create the data process with invalid output parameter name
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "foo"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                                                                                 outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(ex.message, "Output data product does not contain the output parameter name provided")


    def create_event_data_processes(self):

        # two data processes using one transform and one DPD
        argument_map= {"a": "salinity"}


        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
            name='validate_salinity_array',
            description='validate_salinity_array',
            function='validate_salinity_array',
            module="ion.processes.data.transforms.test.test_transform_worker",
            arguments=['a'],
            function_type=TransformFunctionType.TRANSFORM
            )
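        # note: the transform function above lives in this test module; it is expected to publish
        # the DeviceStatusAlertEvent that validate_transform_event checks for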

        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='validate_salinity_array',
            description='validate_salinity_array',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
            )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='validate_salinity_array' )

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                                                                             outputs=None, argument_map=argument_map)
        self.damsclient.register_process(dp1_data_process_id)
        self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        return dp1_data_process_id

    def create_data_process(self):

        # two data processes using one transform and one DPD

        dp1_func_output_dp_id =  self.create_output_data_product()
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "salinity"


        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
             uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
            )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
            version_label='1.0a'
            )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func' )

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                                                                             outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)
        self.damsclient.register_process(dp1_data_process_id)
        #self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        # add an attachment object to this DPD to test new SA-21
        import msgpack
        attachment_content = 'foo bar'
        attachment_obj = IonObject( RT.Attachment,
                                name='test_attachment',
                                attachment_type=AttachmentType.ASCII,
                                content_type='text/plain',
                                content=msgpack.packb(attachment_content))
        att_id = self.rrclient.create_attachment(add_array_dpd_id, attachment_obj)
        self.addCleanup(self.rrclient.delete_attachment, att_id)

        return add_array_dpd_id, dp1_data_process_id, dp1_func_output_dp_id


    def create_output_data_product(self):
        dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition(name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id)

        dp1_output_dp_obj = IonObject(  RT.DataProduct,
            name='data_process1_data_product',
            description='output of add array func',
            temporal_domain = self.time_dom.dump(),
            spatial_domain = self.spatial_dom.dump())

        dp1_func_output_dp_id = self.dataproductclient.create_data_product(dp1_output_dp_obj,  dp1_outgoing_stream_id)
        self.addCleanup(self.dataproductclient.delete_data_product, dp1_func_output_dp_id)
        # retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id, PRED.hasStream, None, True)
        self._output_stream_ids.append(stream_ids[0])

        subscription_id = self.pubsub_client.create_subscription('validator', data_product_ids=[dp1_func_output_dp_id])
        self.addCleanup(self.pubsub_client.delete_subscription, subscription_id)

        def on_granule(msg, route, stream_id):
            log.debug('recv_packet stream_id: %s route: %s   msg: %s', stream_id, route, msg)
            self.validate_output_granule(msg, route, stream_id)
            self.granule_verified.set()

        validator = StandaloneStreamSubscriber('validator', callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_client.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, subscription_id)

        return dp1_func_output_dp_id


    def validate_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DataProcessStatusEvent.
        """
        data_process_event = args[0]
        log.debug("DataProcessStatusEvent: %s" ,  str(data_process_event.__dict__))

        # if data process already created, check origin
        if self.dp_list:
            self.assertIn( data_process_event.origin, self.dp_list)

            # if this is a heartbeat event then 100 granules have been processed
            if 'data process status update.' in data_process_event.description:
                self.heartbeat_event_verified.set()

        else:
            # else check that this is the assign event

            if 'Data process assigned to transform worker' in data_process_event.description:
                self.worker_assigned_event_verified.set()
            elif 'Data process created for data product' in data_process_event.description:
                self.dp_created_event_verified.set()
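
    # For reference, the DataProcessStatusEvents handled above would be published
    # by the data-process machinery roughly as follows (a sketch only, assuming the
    # standard pyon EventPublisher interface; the description strings are the ones
    # the callback matches on):
    #
    #   from pyon.event.event import EventPublisher
    #   publisher = EventPublisher(event_type=OT.DataProcessStatusEvent)
    #   publisher.publish_event(origin=data_process_id,
    #                           description='Data process assigned to transform worker')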


    def validate_output_granule(self, msg, route, stream_id):
        self.assertIn( stream_id, self._output_stream_ids)

        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.debug('validate_output_granule  rdt: %s', rdt)
        sal_val = rdt['salinity']
        np.testing.assert_array_equal(sal_val, np.array([3]))

    def start_event_listener(self):

        es = EventSubscriber(event_type=OT.DataProcessStatusEvent, callback=self.validate_event)
        es.start()

        self.addCleanup(es.stop)

    def validate_transform_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DeviceStatusAlertEvent.
        """
        status_alert_event = args[0]

        np.testing.assert_array_equal(status_alert_event.origin, self.stream_id )
        np.testing.assert_array_equal(status_alert_event.values, np.array([self.event_data_process_id]))
        log.debug("DeviceStatusAlertEvent: %s" ,  str(status_alert_event.__dict__))
        self.event_verified.set()


    def start_event_transform_listener(self):
        es = EventSubscriber(event_type=OT.DeviceStatusAlertEvent, callback=self.validate_transform_event)
        es.start()

        self.addCleanup(es.stop)


    def test_download(self):
        egg_url = 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        egg_path = TransformWorker.download_egg(egg_url)

        import pkg_resources
        pkg_resources.working_set.add_entry(egg_path)

        from ion_example.add_arrays import add_arrays

        a = add_arrays(1,2)
        self.assertEquals(a,3)
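
    # The ion_example egg downloaded above is an external fixture; its add_arrays
    # function is not shown in this file. Based on how it is exercised here
    # (add_arrays(1, 2) == 3) and by the array transform tests above, a minimal
    # equivalent might look like this sketch:
    #
    #   import numpy as np
    #
    #   def add_arrays(arr1, arr2):
    #       """Element-wise sum; works for scalars and numpy arrays alike."""
    #       return np.asarray(arr1) + np.asarray(arr2)
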
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS  = IdentityManagementServiceClient(node=self.container.node)
        self.PSC  = PubsubManagementServiceClient(node=self.container.node)
        self.DP   = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DSC  = DatasetManagementServiceClient(node=self.container.node)
        self.PDC  = ProcessDispatcherServiceClient(node=self.container.node)

        print 'started services'

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return

    @attr('EXT')
    def test_resources_associations_extensions(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """
        
        #stuff we control
        instrument_agent_instance_id, _ =  self.RR.create(any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ =           self.RR.create(any_old(RT.InstrumentAgent))
        instrument_model_id, _ =           self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ =          self.RR.create(any_old(RT.InstrumentDevice))
        platform_agent_instance_id, _ =    self.RR.create(any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ =             self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ =            self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ =             self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ =              self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ =               self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        data_producer_id, _      = self.RR.create(any_old(RT.DataProducer))
        org_id, _ =                self.RR.create(any_old(RT.Org))

        #instrument_agent_instance_id #is only a target
        
        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id)
        self.RR.create_association(org_id, PRED.hasResource, instrument_device_id)


        instrument_model_id #is only a target

        platform_agent_instance_id #is only a target
        
        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        platform_model_id #is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id)

        sensor_model_id #is only a target

        #create a parsed product for this instrument output
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            processing_level_code='Parsed_Canonical',
            temporal_domain = tdom,
            spatial_domain = sdom)
        pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1)


        def addInstOwner(inst_id, subject):

            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            user_info_id = self.IDS.create_user_info(user_id, user_info_obj)

            self.RR.create_association(inst_id, PRED.hasOwner, user_id)


        #Testing multiple instrument owners
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

        extended_instrument = self.IMS.get_instrument_device_extension(instrument_device_id)

        self.assertEqual(instrument_device_id, extended_instrument._id)
        self.assertEqual(len(extended_instrument.owners), 2)
        self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id)


        #check model
        inst_model_obj = self.RR.read(instrument_model_id)
        self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name)

        #check agent instance
        inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id)
        self.assertEqual(inst_agent_instance_obj.name, extended_instrument.agent_instance.name)

        #check agent
        inst_agent_obj = self.RR.read(instrument_agent_id)
        #compound assoc return list of lists so check the first element
        self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent[0].name)

        #check platform device
        plat_device_obj = self.RR.read(platform_device_id)
        self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name)

        #check sensor devices
        self.assertEqual(1, len(extended_instrument.sensor_devices))

        #check data_product_parameters_set
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.data_product_parameters_set.status)
        self.assertIn('Parsed_Canonical', extended_instrument.computed.data_product_parameters_set.value)
        # the ctd parameters should include 'temp'
        self.assertIn('temp', extended_instrument.computed.data_product_parameters_set.value['Parsed_Canonical'])

        #none of these will work because there is no agent
        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
                         extended_instrument.computed.firmware_version.status)
        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
                         extended_instrument.computed.operational_state.status)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.power_status_roll_up.status)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.communications_status_roll_up.status)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.data_status_roll_up.status)
        self.assertEqual(StatusType.STATUS_OK,
                        extended_instrument.computed.data_status_roll_up.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.location_status_roll_up.status)

#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.recent_events.status)
#        self.assertEqual([], extended_instrument.computed.recent_events.value)


        # cleanup
        c = DotDict()
        c.resource_registry = self.RR
        resource_impl = ResourceImpl(c)
        resource_impl.pluck(instrument_agent_id)
        resource_impl.pluck(instrument_model_id)
        resource_impl.pluck(instrument_device_id)
        resource_impl.pluck(platform_agent_id)
        self.IMS.force_delete_instrument_agent(instrument_agent_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id)
        self.IMS.force_delete_platform_agent(platform_agent_id)
        self.IMS.force_delete_platform_device(platform_device_id)
        self.IMS.force_delete_platform_model(platform_model_id)
        self.IMS.force_delete_sensor_device(sensor_device_id)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #stuff we associate to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)
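
    # any_old() comes from the IMS test helpers and is not defined in this file.
    # Judging from its call sites, it builds an IonObject of the given resource
    # type with auto-generated filler values and applies any extra attributes
    # passed in the optional dict (as in test_custom_attributes below). A rough
    # sketch of that assumed behavior:
    #
    #   import itertools
    #   _counter = itertools.count()
    #
    #   def any_old(resource_type, extra_fields=None):
    #       obj = IonObject(resource_type, name="%s_%d" % (resource_type, _counter.next()))
    #       for field, value in (extra_fields or {}).iteritems():
    #           setattr(obj, field, value)
    #       return obj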



    def test_custom_attributes(self):
        """
        Test assignment of custom attributes
        """

        instrument_model_id, _ =           self.RR.create(any_old(RT.InstrumentModel,
                {"custom_attributes":
                         {"favorite_color": "attr desc goes here"}
            }))
        instrument_device_id, _ =          self.RR.create(any_old(RT.InstrumentDevice,
                {"custom_attributes":
                         {"favorite_color": "red",
                          "bogus_attr": "should raise warning"
                     }
            }))

        self.IMS.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)

        # cleanup
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)






    def _get_datastore(self, dataset_id):
        dataset = self.DSC.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore




    def test_checkpoint_restore(self):

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel",
                                  stream_configuration= {'raw': 'ctd_raw_param_dict' , 'parsed': 'ctd_parsed_param_dict' })
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver",
                                  driver_class="SBE37Driver" )
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug( 'new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 '
        + '(SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345" )
        instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
                  instDevice_id)

        port_agent_config = {
            'device_addr': 'sbe37-simulator.oceanobservatories.org',
            'device_port': 4001,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'command_port': 4002,
            'data_port': 4003,
            'log_level': 5,
            }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          comms_device_address='sbe37-simulator.oceanobservatories.org',
                                          comms_device_port=4001,
                                          port_agent_config = port_agent_config)


        instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                               instAgent_id,
                                                                               instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict')
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary=rpdict_id)


        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id1 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=parsed_stream_def_id,
                                                       parameter_dictionary=spdict_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)



        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug( 'Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]
        #create the datastore at the beginning of each int test that persists data
        self._get_datastore(self.parsed_dataset)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)


        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id2 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=raw_stream_def_id,
                                                       parameter_dictionary=rpdict_id)
        log.debug( 'new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)


        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
        gate = ProcessStateGate(self.PDC.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" %
                                        instance_obj.agent_process_id)


        # take snapshot of config
        snap_id = self.IMS.agent_state_checkpoint(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)
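        # agent_state_checkpoint evidently stores the snapshot as an attachment on
        # the device resource, so the returned id can be read back with
        # read_attachment; include_content=True pulls in the attachment body as well.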
        print "Saved config:"
        print snap_obj.content

        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.agent_state_restore(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)
        self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])
class TestAgentLaunchOps(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'
        unittest  # suppress a PyCharm inspector error if all unittest.skip references are commented out

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS  = IdentityManagementServiceClient(node=self.container.node)
        self.PSC  = PubsubManagementServiceClient(node=self.container.node)
        self.DP   = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DSC  = DatasetManagementServiceClient(node=self.container.node)
        self.PDC  = ProcessDispatcherServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return


    def test_get_agent_client_noprocess(self):
        inst_device_id = self.RR2.create(any_old(RT.InstrumentDevice))

        iap = ResourceAgentClient._get_agent_process_id(inst_device_id)

        # should be no running agent
        self.assertIsNone(iap)

        # should raise NotFound
        self.assertRaises(NotFound, ResourceAgentClient, inst_device_id)



    def test_resource_state_save_restore(self):

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' )
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict')
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri=DRV_URI_GOOD,
                                  stream_configurations = [raw_config, parsed_config] )
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug( 'new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 '
        + '(SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345" )
        instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
                  instDevice_id)

        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)


        instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                               instAgent_id,
                                                                               instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)


        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id1 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=parsed_stream_def_id)
                                                       
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)
        self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id1)



        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug( 'Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]
        #create the datastore at the beginning of each int test that persists data



        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id2 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=raw_stream_def_id)
        log.debug( 'new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)
        self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)


        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
        gate = AgentProcessStateGate(self.PDC.read_process,
                                     instDevice_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" %
                                        gate.process_id)
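        # Unlike the plain ProcessStateGate used in test_checkpoint_restore above,
        # AgentProcessStateGate appears to be keyed by the device id and to resolve
        # the agent's process id itself, exposing it afterwards as gate.process_id.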


        # take snapshot of config
        snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)


        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.restore_resource_state(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)

        if "BAD_DATA" == instance_obj.driver_config["comms_config"]:
            print "Saved config:"
            print snap_obj.content
            self.fail("Saved config was not properly restored")

        self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])

        
        self.DP.delete_data_product(data_product_id1)
        self.DP.delete_data_product(data_product_id2)


    def test_agent_instance_config_hasDevice(self):
        def assign_fn(child_device_id, parent_device_id):
            self.RR2.create_association(parent_device_id, PRED.hasDevice, child_device_id)

        def find_fn(parent_device_id):
            ret, _ = self.RR.find_objects(subject=parent_device_id, predicate=PRED.hasDevice, id_only=True)
            return ret

        self.base_agent_instance_config(assign_fn, find_fn)
        log.info("END test_agent_instance_config_hasDevice")

    def test_agent_instance_config_hasNetworkParent(self):
        def assign_fn(child_device_id, parent_device_id):
            self.RR2.create_association(child_device_id, PRED.hasNetworkParent, parent_device_id)

        def find_fn(parent_device_id):
            ret, _ = self.RR.find_subjects(object=parent_device_id, predicate=PRED.hasNetworkParent, id_only=True)
            return ret

        self.base_agent_instance_config(assign_fn, find_fn)
        log.info("END test_agent_instance_config_hasNetworkParent")

    def base_agent_instance_config(self, 
                                   assign_child_platform_to_parent_platform_fn, 
                                   find_child_platform_ids_of_parent_platform_fn):
        """
        Verify that agent configurations are being built properly
        """
        clients = DotDict()
        clients.resource_registry  = self.RR
        clients.pubsub_management  = self.PSC
        clients.dataset_management = self.DSC
        config_builder = DotDict()
        config_builder.i = None
        config_builder.p = None

        def refresh_pconfig_builder_hack(config_builder):
            """
            ugly hack to get around "idempotent" RR2 caching
            remove after https://github.com/ooici/coi-services/pull/1190
            """
            config_builder.p = PlatformAgentConfigurationBuilder(clients)

        def refresh_iconfig_builder_hack(config_builder):
            """
            ugly hack to get around "idempotent" RR2 caching
            remove after https://github.com/ooici/coi-services/pull/1190
            """
            config_builder.i = InstrumentAgentConfigurationBuilder(clients)


        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        org_obj = any_old(RT.Org)
        org_id = self.RR2.create(org_obj)

        inst_startup_config = {'startup': 'config'}

        generic_alerts_config = [ {'lvl2': 'lvl3val'} ]

        required_config_keys = [
            'org_governance_name',
            'device_type',
            'agent',
            'driver_config',
            'stream_config',
            'startup_config',
            'aparam_alerts_config',
            'children']



        def verify_instrument_config(config, device_id):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name, config['org_governance_name'])
            self.assertEqual(RT.InstrumentDevice, config['device_type'])
            self.assertIn('driver_config', config)
            driver_config = config['driver_config']
            expected_driver_fields = {'process_type': ('ZMQPyClassDriverLauncher',),
                                      }
            for k, v in expected_driver_fields.iteritems():
                self.assertIn(k, driver_config)
                self.assertEqual(v, driver_config[k])

            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertEqual(inst_startup_config, config['startup_config'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            for key in ['children']:
                self.assertEqual({}, config[key])


        def verify_child_config(config, device_id, inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name, config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            self.assertIn('driver_config', config)
            self.assertIn('foo', config['driver_config'])
            self.assertIn('ports', config['driver_config'])
            self.assertEqual('bar', config['driver_config']['foo'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])

            if None is inst_device_id:
                for key in ['children', 'startup_config']:
                    self.assertEqual({}, config[key])
            else:
                for key in ['startup_config']:
                    self.assertEqual({}, config[key])

                self.assertIn(inst_device_id, config['children'])
                verify_instrument_config(config['children'][inst_device_id], inst_device_id)

            if config['driver_config']['ports']:
                self.assertTrue( isinstance(config['driver_config']['ports'], dict) )



        def verify_parent_config(config, parent_device_id, child_device_id, inst_device_id=None):
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.org_governance_name, config['org_governance_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertIn('process_type', config['driver_config'])
            self.assertIn('ports', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])
            self.assertEqual({'resource_id': parent_device_id}, config['agent'])
            self.assertIn('aparam_alerts_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alerts_config'])
            self.assertIn('stream_config', config)
            for key in ['startup_config']:
                self.assertEqual({}, config[key])

            if config['driver_config']['ports']:
                self.assertTrue( isinstance(config['driver_config']['ports'], dict) )

            self.assertIn(child_device_id, config['children'])
            verify_child_config(config['children'][child_device_id], child_device_id, inst_device_id)






        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)
        #todo: create org and figure out which agent resource needs to get assigned to it


        def _make_platform_agent_structure(name='', agent_config=None):
            if None is agent_config: agent_config = {}

            # instance creation
            platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {'driver_config': {'foo': 'bar'},
                                                                             'alerts': generic_alerts_config})
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' )
            platform_agent_obj = any_old(RT.PlatformAgent, {"stream_configurations":[raw_config]})
            platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)
            self.addCleanup(self.DP.suspend_data_product_persistence, dp_id)


            #deployment creation
            site_obj = IonObject(RT.PlatformSite, name='sitePlatform')
            site_id = self.OMS.create_platform_site(platform_site=site_obj)

            # find current deployment using time constraints
            current_time =  int( calendar.timegm(time.gmtime()) )
            # two years on either side of current time
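            # (63115200 s = 2 * 365.25 days * 86400 s/day)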
            start = current_time - 63115200
            end = current_time + 63115200
            temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))
            platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-09-CTDMO0999',
                                                            port_type=PortTypeEnum.UPLINK,
                                                            ip_address=0)
            deployment_obj = IonObject(RT.Deployment,
                                       name='TestPlatformDeployment_' + name,
                                       description='some new deployment',
                                       context=IonObject(OT.CabledNodeDeploymentContext),
                                       constraint_list=[temporal_bounds],
                                       port_assignments={platform_device_id:platform_port_obj})

            deploy_id = self.OMS.create_deployment(deployment=deployment_obj, site_id=site_id, device_id=platform_device_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, org_id)

            return platform_agent_instance_id, platform_agent_id, platform_device_id


        def _make_instrument_agent_structure(agent_config=None):
            if None is agent_config: agent_config = {}

            # instance creation
            instrument_agent_instance_obj = any_old(RT.InstrumentAgentInstance, {"startup_config": inst_startup_config,
                                                                                 'alerts': generic_alerts_config})
            instrument_agent_instance_obj.agent_config = agent_config
            instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw',
                                             parameter_dictionary_name='ctd_raw_param_dict' )
            instrument_agent_obj = any_old(RT.InstrumentAgent, {"stream_configurations":[raw_config]})
            instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj)

            # device creation
            instrument_device_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)
            self.addCleanup(self.DP.suspend_data_product_persistence, dp_id)

            #deployment creation
            site_obj = IonObject(RT.InstrumentSite, name='siteInstrument')
            site_id = self.OMS.create_instrument_site(instrument_site =site_obj)

            # find current deployment using time constraints
            current_time =  int( calendar.timegm(time.gmtime()) )
            # two years on either side of current time
            start = current_time - 63115200
            end = current_time + 63115200
            temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))
            platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-08-CTDMO0888',
                                                            port_type=PortTypeEnum.PAYLOAD,
                                                            ip_address=0)
            deployment_obj = IonObject(RT.Deployment,
                                       name='TestDeployment for Cabled Instrument',
                                       description='some new deployment',
                                       context=IonObject(OT.CabledInstrumentDeploymentContext),
                                       constraint_list=[temporal_bounds],
                                       port_assignments={instrument_device_id:platform_port_obj})

            deploy_id = self.OMS.create_deployment(deployment=deployment_obj, site_id=site_id, device_id=instrument_device_id)


            # assignments
            self.RR2.assign_instrument_agent_instance_to_instrument_device_with_has_agent_instance(instrument_agent_instance_id, instrument_device_id)
            self.RR2.assign_instrument_agent_to_instrument_agent_instance_with_has_agent_definition(instrument_agent_id, instrument_agent_instance_id)
            self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id)

            return instrument_agent_instance_id, instrument_agent_id, instrument_device_id



        # can't do anything without an agent instance obj
        log.debug("Testing that preparing a launcher without agent instance raises an error")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        self.assertRaises(AssertionError, config_builder.p.prepare, will_launch=False)

        log.debug("Making the structure for a platform agent, which will be the child")
        platform_agent_instance_child_id, _, platform_device_child_id  = _make_platform_agent_structure(name='child')
        platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id)

        log.debug("Preparing a valid agent instance launch, for config only")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(platform_agent_instance_child_obj)
        child_config = config_builder.p.prepare(will_launch=False)
        verify_child_config(child_config, platform_device_child_id)


        log.debug("Making the structure for a platform agent, which will be the parent")
        platform_agent_instance_parent_id, _, platform_device_parent_id  = _make_platform_agent_structure(name='parent')
        platform_agent_instance_parent_obj = self.RR2.read(platform_agent_instance_parent_id)

        log.debug("Testing child-less parent as a child config")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = config_builder.p.prepare(will_launch=False)
        verify_child_config(parent_config, platform_device_parent_id)

        log.debug("assigning child platform to parent")
        assign_child_platform_to_parent_platform_fn(platform_device_child_id, platform_device_parent_id)

        child_device_ids = find_child_platform_ids_of_parent_platform_fn(platform_device_parent_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing parent + child as parent config")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = config_builder.p.prepare(will_launch=False)
        verify_parent_config(parent_config, platform_device_parent_id, platform_device_child_id)


        log.debug("making the structure for an instrument agent")
        instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure()
        instrument_agent_instance_obj = self.RR2.read(instrument_agent_instance_id)

        log.debug("Testing instrument config")
        refresh_iconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.i.set_agent_instance_object(instrument_agent_instance_obj)
        instrument_config = config_builder.i.prepare(will_launch=False)
        verify_instrument_config(instrument_config, instrument_device_id)

        log.debug("assigning instrument to platform")
        self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id, platform_device_child_id)

        child_device_ids = self.RR2.find_instrument_device_ids_of_platform_device_using_has_device(platform_device_child_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing entire config")
        refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated
        config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj)
        full_config = config_builder.p.prepare(will_launch=False)
        verify_parent_config(full_config, platform_device_parent_id, platform_device_child_id, instrument_device_id)

        #self.fail(parent_config)
        #plauncher.prepare(will_launch=False)
        log.info("END base_agent_instance_config")
class TestInstrumentManagementServiceAgents(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)

        print 'started services'

    @unittest.skip('this test just for debugging setup')
    def test_just_the_setup(self):
        return

    def test_register_instrument_agent(self):

        #test ssh-ability
        cfg_host = CFG.service.instrument_management.driver_release_host  #'amoeaba.ucsd.edu'
        cfg_user = pwd.getpwuid(os.getuid())[0]

        remotehost = "%s@%s" % (cfg_user, cfg_host)

        ssh_retval = subprocess.call([
            "ssh", "-o", "PasswordAuthentication=no", remotehost, "-f", "true"
        ])

        if 0 != ssh_retval:
            raise unittest.SkipTest("SSH/SCP credentials to %s didn't work" %
                                    remotehost)

        inst_agent_id = self.IMS.create_instrument_agent(
            any_old(RT.InstrumentAgent))
        inst_model_id = self.IMS.create_instrument_model(
            any_old(RT.InstrumentModel))

        self.IMS.assign_instrument_model_to_instrument_agent(
            inst_model_id, inst_agent_id)

        self.IMS.register_instrument_agent(inst_agent_id, BASE64_EGG,
                                           BASE64_ZIPFILE)

        attachments, _ = self.RR.find_objects(inst_agent_id,
                                              PRED.hasAttachment,
                                              RT.Attachment, True)

        self.assertEqual(len(attachments), 4)

        for a_id in attachments:

            a = self.RR.read_attachment(a_id)

            parts = a.name.split(".")

            if "txt" == parts[1]:
                self.assertEqual("text/plain", a.content_type)
                self.assertIn(parts[0], a.keywords)
                self.assertEqual(a.content, (parts[0] * 3) + "\n")
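
            # Only the text attachments are spot-checked: a file named "<word>.txt"
            # in the uploaded zip is expected to carry "<word>" as a keyword and
            # "<word><word><word>\n" as its content. BASE64_EGG and BASE64_ZIPFILE
            # are presumably base64-encoded fixtures defined at module level in the
            # test file.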

        log.info("L4-CI-SA-RQ-148")

        return