Example #1
File: apps.py Project: swarbhanu/pyon
    def start_app(self, appdef=None, config=None):
        """
        @brief Start an app from an app definition.
        Note: apps can come in one of 2 variants:
        1 processapp: In-line defined process to be started
        2 regular app: Full app definition
        """
        log.debug("AppManager.start_app(appdef=%s) ..." % appdef)

        appdef = DotDict(appdef)

        if 'config' in appdef:
            app_cfg = appdef.config.copy()
            if config:
                dict_merge(app_cfg, config, inplace=True)
            config = app_cfg

        if 'processapp' in appdef:
            # Case 1: Appdef contains definition of process to start
            name, module, cls = appdef.processapp
            try:
                pid = self.container.spawn_process(name, module, cls, config)
                appdef._pid = pid
                self.apps.append(appdef)
            except Exception as ex:
                log.exception("App %s start from processapp failed" % appdef.name)
Example #2
    def _run_consumer(self, ctag, queue_name, gqueue, callback):
        cnt = 0
        while True:
            m = gqueue.get()
            if isinstance(m, self.ConsumerClosedMessage):
                break
            exchange, routing_key, body, props = m

            # create method frame
            method_frame = DotDict()
            method_frame['consumer_tag'] = ctag
            method_frame['redelivered'] = False  # @TODO
            method_frame['exchange'] = exchange
            method_frame['routing_key'] = routing_key

            # create header frame
            header_frame = DotDict()
            header_frame['headers'] = props.copy()

            # make delivery tag for ack/reject later
            dtag = self._generate_dtag(ctag, cnt)
            cnt += 1

            with self._lock_unacked:
                self._unacked[dtag] = (ctag, queue_name, m)

            method_frame['delivery_tag'] = dtag

            # deliver to callback
            try:
                callback(self, method_frame, header_frame, body)
            except Exception:
                log.exception("delivering to consumer, ignore!")
Example #3
    def start_app(self, appdef=None, config=None):
        """
        @brief Start an app from an app definition.
        Note: apps can come in one of 2 variants:
        1 processapp: In-line defined process to be started
        2 regular app: Full app definition
        """
        log.debug("AppManager.start_app(appdef=%s) ..." % appdef)

        appdef = DotDict(appdef)

        if 'config' in appdef:
            app_cfg = appdef.config.copy()
            if config:
                dict_merge(app_cfg, config, inplace=True)
            config = app_cfg

        if 'processapp' in appdef:
            # Case 1: Appdef contains definition of process to start
            name, module, cls = appdef.processapp
            try:
                pid = self.container.spawn_process(name, module, cls, config)
                appdef._pid = pid
                self.apps.append(appdef)
            except Exception as ex:
                log.exception("App %s start from processapp failed" %
                              appdef.name)
Example #4
    def load_data_process(self, stream_id=""):

        dpms_client = DataProcessManagementServiceClient()

        dataprocess_details = dpms_client.read_data_process_for_stream(stream_id)
        dataprocess_details = DotDict(dataprocess_details or {})
        dataprocess_id = dataprocess_details.dataprocess_id

        #set metrics attributes
        dataprocess_details.granule_counter = 0

        self._dataprocesses[dataprocess_id] = dataprocess_details

        #add the stream id to the map
        if 'in_stream_id' in dataprocess_details:
            if dataprocess_details['in_stream_id'] in self._streamid_map:
                self._streamid_map[dataprocess_details['in_stream_id']].append(dataprocess_id)
            else:
                self._streamid_map[dataprocess_details['in_stream_id']] = [dataprocess_id]
        #todo: add transform worker id
        self.event_publisher.publish_event(origin=dataprocess_id, origin_type='DataProcess', status=DataProcessStatusType.NORMAL,
                                           description='data process loaded into transform worker')

        #create a publisher for output stream
        self.create_publisher(dataprocess_id, dataprocess_details)

        return [dataprocess_id]
Example #5
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)
Example #6
    def test_create_catalog(self):
        self.rr_find_res.return_value = ([],[])
        fake_indexes = [
            DotDict({'_id':'1', 'name':'one', 'options' : {'attribute_match':['attr_field'],'range_fields':[], 'geo_fields':[] }}),
            DotDict({'_id':'1', 'name':'two', 'options' : {'attribute_match':[],'range_fields':['range_field'], 'geo_fields':[] }}),
            DotDict({'_id':'1', 'name':'three', 'options' : {'attribute_match':[],'range_fields':['range_field'], 'geo_fields':['geo_field'] }})
        ]
        def find_res(*args, **kwargs):
            retval = {}
            for d in fake_indexes:
                retval[d.name] = d
            return retval


        self.ims_list_indexes.side_effect = find_res

        self.rr_create.return_value = 'cat_id','rev'


        retval = self.catalog_management.create_catalog('catalog_name',['range_field','geo_field'])
        self.assertTrue(self.rr_create_assoc.call_count==1,
            'Not enough associations %d' % self.rr_create_assoc.call_count)
        retval = self.catalog_management.create_catalog('catalog_name',['range_field'])
        self.assertTrue(self.rr_create_assoc.call_count==3,
            'Not enough associations %d' % self.rr_create_assoc.call_count)
        self.assertTrue(retval=='cat_id')
Example #7
    def test_define_replay_no_data(self):
        #mocks
        self.mock_ps_create_stream.return_value = '12345'
        self.mock_rr_create.return_value = ('replay_id', 'garbage')
        self.mock_ds_read.return_value = DotDict({
            'datastore_name':
            'unittest',
            'view_name':
            'garbage',
            'primary_view_key':
            'primary key'
        })

        document = DotDict({'stream_resource_id': '0'})
        self.mock_pd_schedule.return_value = 'process_id'

        self.datastore.query_view.return_value = []  # Raises index error

        config = {
            'process': {
                'query': 'myquery',
                'datastore_name': 'unittest',
                'view_name': 'garbage',
                'key_id': 'primary key',
                'delivery_format': None,
                'publish_streams': {
                    'output': '12345'
                }
            }
        }

        with self.assertRaises(NotFound):
            self.data_retriever_service.define_replay(dataset_id='dataset_id',
                                                      query='myquery')
Example #8
    def preload(self):
        config = DotDict()
        config.op = 'load'
        config.scenario = 'BETA'
        config.categories = 'Parser,Reference'
        config.path = 'master'
        self.container.spawn_process('ion_loader', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
Example #9
    def test_get_last_granule(self, mock_bb, dsm_cli, dsm):

        mock_bb().sync_rdt_with_coverage = Mock()
        mock_bb().to_granule.return_value = {'test':True}

        dsm_cli().read_dataset = Mock()
        dataset = DotDict()
        dataset.datastore_name = 'test'
        dataset.view_name = 'bogus/view'

        dsm._get_coverage = Mock()
        dsm._get_coverage.return_value = {}
        
        datastore = DotDict()
        datastore.query_view = Mock()
        datastore.query_view.return_value = [{'doc':{'ts_create':0}}]

        
        container = DotDict()
        container.datastore_manager.get_datastore = Mock()
        container.datastore_manager.get_datastore.return_value = datastore

        retval = self.replay.get_last_granule(container,'dataset_id')

        self.assertEquals(retval,{'test':True})
Example #10
    def setUp(self):

        self.mock_backend = DotDict()
        self.mock_backend['create_definition'] = Mock()
        self.mock_backend['read_definition'] = Mock()
        self.mock_backend['read_definition_by_name'] = Mock()
        self.mock_backend['update_definition'] = Mock()
        self.mock_backend['delete_definition'] = Mock()
        self.mock_backend['create'] = Mock()
        self.mock_backend['schedule'] = Mock()
        self.mock_backend['read_process'] = Mock()
        self.mock_backend['list'] = Mock()
        self.mock_backend['cancel'] = Mock()
        self.mock_backend['set_system_boot'] = Mock()

        self.mock_dashi = DotDict()
        self.mock_dashi['handle'] = Mock()

        self.mock_pyon_dashi_exc_map = {
            NotFound: FakeDashiNotFoundError,
            BadRequest: FakeDashiBadRequestError,
            Conflict: FakeDashiWriteConflictError
        }

        self.pd_dashi_handler = PDDashiHandler(self.mock_backend,
                                               self.mock_dashi)
Example #11
    def create_data_process_logger(self, data_product_id, clone_id, argument_map):
        '''
        Launches a data process that just prints input
        '''
        out_name = argument_map.values()[0]

        # Make the transform function
        tf_obj = IonObject(RT.TransformFunction,
                           name='stream_logger',
                           description='',
                           function='stream_logger',
                           module='ion.services.sa.test.test_data_process_functions',
                           arguments=['x'],
                           function_type=TransformFunctionType.TRANSFORM)
        func_id = self.data_process_management.create_transform_function(tf_obj)
        self.addCleanup(self.data_process_management.delete_transform_function, func_id)
        
        # Make the data process definition
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='stream_logger',
                            description='logs some stream stuff',
                            data_process_type=DataProcessTypeEnum.RETRIEVE_PROCESS)
        configuration = DotDict()
        configuration.publish_limit = 40
        dpd_id = self.data_process_management.create_data_process_definition(dpd_obj, func_id)
        data_process_id = self.data_process_management.create_data_process(
                            data_process_definition_id=dpd_id, 
                            inputs=[data_product_id], 
                            outputs=[clone_id], 
                            configuration=configuration,
                            argument_map=argument_map, 
                            out_param_name=out_name) 
        return data_process_id
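The transform function is registered here by module path and name only; its body is not shown. A hypothetical stream_logger matching the declared signature (a single argument 'x', per arguments=['x'] above) might be as simple as:

    import logging
    log = logging.getLogger(__name__)

    def stream_logger(x):
        # Hypothetical body: log the incoming value and pass it through unchanged.
        log.info("stream_logger got: %s", x)
        return x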
Example #12
class ChatServerService(BaseChatsService):
    def on_init(self):
        print "INIT CHAT SERVER"
        self.clients = DotDict()

    def register(self, user_name='', proc_id=''):
        print "Registering user %s, client %s" % (user_name, proc_id)
        client = ProcessRPCClient(node=self.container.node, name=proc_id, iface=IChatcService, process=self)
        self.clients[user_name] = DotDict(procid=proc_id, user_name=user_name, client=client)
        return "OK"

    def unregister(self, user_name=''):
        log.debug("Unregistering client %s" % proc_id)
        del self.clients[user_name]
        return "OK"

    def message(self, from_name='', to_name='', text=''):
        if to_name == "all":
            for cl in self.clients.values():
                cl['client'].message(from_name=from_name, text=text)
        else:
            client = self.clients.get(to_name, None)
            if client:
                client.client.message(from_name=from_name, text=text)
            else:
                return "USER NOT FOUND"
        return "OK"

    def list_users(self):
        return str(self.clients.keys())
Example #13
    def test_dot_dict_constant(self):
        d = DotDict({"foo": "bar"})
        self.assertEqual("bar", d.foo)
        d.foo = "somethingnew"
        self.assertEqual("somethingnew", d.foo)

        # DotDict only checks that an assignment operation is happening when it creates dummy entries
        # ... it doesn't check that the dummy entry is on the left hand side of the assignment
        k = d.foo1
        self.assertIn("foo1", dir(d))

        d.lock()

        # test assigning directly to a locked dict
        with self.assertRaises(AttributeError):
            d.foo = "somethingelse"
        self.assertEqual("somethingnew", d.foo)

        # test dummy-creation-on-assignment loophole
        with self.assertRaises(AttributeError):
            k = d.foo2
        self.assertNotIn("foo2", dir(d))

        # test alternate dummy creation method: calling a function with it
        with self.assertRaises(AttributeError):
            k = lambda _: True
            k(d.foo3)
        self.assertNotIn("foo3", dir(d))

        self.assertNotIn(DICT_LOCKING_ATTR, dir(d))
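Taken together, these tests pin down DotDict's contract: attribute access maps to item access, plain nested dicts come back as DotDicts, missing attributes auto-create chained entries, dict builtins cannot be shadowed, and lock() freezes the structure. A minimal sketch consistent with that behavior follows; pyon's real DotDict has more machinery (copy support, get_safe, the DICT_LOCKING_ATTR bookkeeping checked above), so treat this as an illustration only:

    class DotDict(dict):
        """Sketch: dict with attribute access, auto-vivified chains, and lock()."""

        def __getattr__(self, key):
            # Invoked only when normal attribute lookup fails.
            if key.startswith('__') and key.endswith('__'):
                raise AttributeError(key)      # leave dunder probing alone
            try:
                value = self[key]
                if isinstance(value, dict) and not isinstance(value, DotDict):
                    value = DotDict(value)     # so d.foo.bar chains through plain dicts
                    self[key] = value
                return value
            except KeyError:
                if self.__dict__.get('_locked', False):
                    raise AttributeError(key)  # no dummy entries on a locked dict
                child = DotDict()              # auto-create for chained assignment
                self[key] = child
                return child

        def __setattr__(self, key, value):
            if hasattr(dict, key):
                raise AttributeError("cannot override dict builtin '%s'" % key)
            if self.__dict__.get('_locked', False):
                raise AttributeError("DotDict is locked")
            if isinstance(value, dict) and not isinstance(value, DotDict):
                value = DotDict(value)         # base.map = {...} comes back as a DotDict
            self[key] = value

        def lock(self):
            # Bypass __setattr__ so the flag is not stored as a dict item.
            self.__dict__['_locked'] = True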
Example #14
    def test_dot_dict(self):
        dotDict = DotDict({"foo": {"bar": {"bah": "fah"}}})
        val = dotDict.foo.bar.bah
        self.assertEqual(val, "fah")
        dotDict.a = "1"
        self.assertEqual(dotDict.a, "1")
        self.assertTrue('a' in dotDict)
Example #15
    def test_dot_dict(self):
        dotDict = DotDict({"foo": {"bar": {"bah": "fah"}}})
        val = dotDict.foo.bar.bah
        self.assertEqual(val, "fah")
        dotDict.a = "1"
        self.assertEqual(dotDict.a, "1")
        self.assertTrue("a" in dotDict)
Example #16
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)
Example #17
    def test_create_network_definition_from_ci_config_bad(self):

        CFG = DotDict({
            'device_type': "bad_device_type",
        })

        # device_type
        with self.assertRaises(PlatformDefinitionException):
            NetworkUtil.create_network_definition_from_ci_config(CFG)

        CFG = DotDict({
            'device_type': "PlatformDevice",
        })

        # missing platform_id
        with self.assertRaises(PlatformDefinitionException):
            NetworkUtil.create_network_definition_from_ci_config(CFG)

        CFG = DotDict({
            'device_type': "PlatformDevice",
            'platform_config': {
                'platform_id': 'Node1D'
            },
        })

        # missing driver_config
        with self.assertRaises(PlatformDefinitionException):
            NetworkUtil.create_network_definition_from_ci_config(CFG)
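By elimination, a config that clears all three checks would need at least a recognized device_type, a platform_config carrying a platform_id, and a driver_config; a hypothetical minimal example (the required contents of driver_config are not pinned down by this test):

    CFG = DotDict({
        'device_type': "PlatformDevice",
        'platform_config': {
            'platform_id': 'Node1D'
        },
        'driver_config': {},   # hypothetical; real entries depend on the driver
    })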
Example #18
    def test_dotdict_copy(self):
        d = DotDict({"foo": "bar"})
        d2 = copy.copy(d)
        self.assertTrue(hasattr(d2, "foo"))
        self.assertEqual("bar", d2.foo)

        # output_streams = copy(self.CFG.get_safe('process.publish_streams'))
        v = "a12345"
        CFG = DotDict()
        CFG.process.publish_streams.salinity = v
        print "CFG =", CFG
        self.assertTrue(hasattr(CFG.process.publish_streams, "salinity"))
        self.assertEqual(v, CFG.process.publish_streams.salinity)
        self.assertEqual(v, CFG.get_safe("process.publish_streams").salinity)
        self.assertEqual(
            v,
            copy.copy(CFG.get_safe("process.publish_streams")).salinity)

        output_streams = copy.copy(CFG.get_safe("process.publish_streams"))
        print "output_streams =", output_streams
        self.assertTrue(hasattr(output_streams, "salinity"))
        print "output_streams.salinity =", output_streams.salinity
        self.assertEqual(v, output_streams.salinity)

        first_stream = output_streams.popitem()
        print "first_stream =", first_stream
        self.assertEqual(v, first_stream[1])

        d.lock()
        dl = copy.copy(d)
        self.assertTrue(hasattr(dl, "foo"))
        self.assertEqual("bar", dl.foo)
        with self.assertRaises(AttributeError):
            d.foo2 = "nope"
Example #19
class ChatServerService(BaseChatsService):
    def on_init(self):
        print "INIT CHAT SERVER"
        self.clients = DotDict()

    def register(self, user_name='', proc_id=''):
        print "Registering user %s, client %s" % (user_name, proc_id)
        client = ProcessRPCClient(node=self.container.node,
                                  name=proc_id,
                                  iface=IChatcService,
                                  process=self)
        self.clients[user_name] = DotDict(procid=proc_id,
                                          user_name=user_name,
                                          client=client)
        return "OK"

    def unregister(self, user_name=''):
        log.debug("Unregistering client %s" % proc_id)
        del self.clients[user_name]
        return "OK"

    def message(self, from_name='', to_name='', text=''):
        if to_name == "all":
            for cl in self.clients.values():
                cl['client'].message(from_name=from_name, text=text)
        else:
            client = self.clients.get(to_name, None)
            if client:
                client.client.message(from_name=from_name, text=text)
            else:
                return "USER NOT FOUND"
        return "OK"

    def list_users(self):
        return str(self.clients.keys())
Example #20
    def setUp(self):
        mock_clients = self._create_service_mock('index_management')
        self.index_management = IndexManagementService()
        self.index_management.clients = mock_clients

        self.rr_create = mock_clients.resource_registry.create
        self.rr_read = mock_clients.resource_registry.read
        self.rr_update = mock_clients.resource_registry.update
        self.rr_delete = mock_clients.resource_registry.delete
        self.rr_find_resources = mock_clients.resource_registry.find_resources
        self.rr_find_assocs = mock_clients.resource_registry.find_associations
        self.rr_find_subj = mock_clients.resource_registry.find_subjects
        self.rr_find_obj = mock_clients.resource_registry.find_objects
        self.rr_delete_assoc = mock_clients.resource_registry.delete_association

        self.get_datastore = Mock()
        self.db_create = Mock()

        self.get_datastore.return_value = DotDict(
            {'datastore_name': 'test_datastore'})
        self.index_management.container = DotDict({
            'datastore_manager':
            DotDict({'get_datastore': self.get_datastore})
        })
        self.index_name = 'test_index'
Example #21
    def test_create_schedule(self):

        proc_def = DotDict()
        proc_def['name'] = "someprocess"
        proc_def['executable'] = {'module': 'my_module', 'class': 'class', 'url': 'myurl'}
        mock_read_definition = Mock()
        mock_read_definition.return_value = proc_def
        self.pd_service.backend.read_definition = mock_read_definition

        pid = self.pd_service.create_process("fake-process-def-id")

        proc_schedule = DotDict()
        proc_schedule['target'] = DotDict()
        proc_schedule.target['constraints'] = {"hats": 4}
        proc_schedule.target['node_exclusive'] = None
        proc_schedule.target['execution_engine_id'] = None

        configuration = {"some": "value"}

        pid2 = self.pd_service.schedule_process("fake-process-def-id",
            proc_schedule, configuration, pid)

        self.assertTrue(pid.startswith(proc_def.name) and pid != proc_def.name)
        self.assertEqual(pid, pid2)
        self.assertTrue(pid.startswith(proc_def.name) and pid != proc_def.name)

        self.assertEqual(self.mock_core.schedule_process.call_count, 1)
Example #22
    def _launch_transform(self, name_of_transform='', data_proc_def_id=None, input_dpod_id=None, output_dpod_id=None):

        # The key name here needs to be "L2_stream", because when the data process is
        # launched this name becomes the key under config.process.publish_streams

        if name_of_transform in ['L0', 'L1']:
            binding = '%s_stream' % name_of_transform
        elif name_of_transform == 'L2_salinity':
            binding = 'salinity'
        elif name_of_transform == 'L2_density':
            binding = 'density'

        output_products = {binding : output_dpod_id}

        config = None
        if name_of_transform == 'L1':
            config = self._create_calibration_coefficients_dict()
        elif name_of_transform == 'L2_density':
            config = DotDict()
            config.process = {'lat' : 32.7153, 'lon' : 117.1564}

        data_proc_id = self.data_process_management.create_data_process( data_proc_def_id, [input_dpod_id], output_products, config)
        self.addCleanup(self.data_process_management.delete_data_process, data_proc_id)

        self.data_process_management.activate_data_process(data_proc_id)
        self.addCleanup(self.data_process_management.deactivate_data_process, data_proc_id)

        log.debug("Created a data process for ctdbp %s transform: id = %s", name_of_transform, data_proc_id)

        return data_proc_id
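Per the comment at the top of this method, the binding chosen here surfaces as a key under process.publish_streams in the spawned process's configuration. Under that assumption, the config an L2 transform would eventually see is shaped roughly like this (illustrative stream id):

    config = DotDict()
    config.process.publish_streams.L2_stream = 'output_stream_id'   # illustrative
    # inside the process, the output stream is then looked up as:
    stream_id = config.get_safe('process.publish_streams.L2_stream')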
Example #23
    def test_list_indexes(self):
        # Mocks
        self.rr_find_resources.return_value = ([
            DotDict({
                '_id': '1',
                'name': '1'
            }),
            DotDict({
                '_id': '2',
                'name': '2'
            }),
            DotDict({
                '_id': '3',
                'name': '3'
            }),
            DotDict({
                '_id': '4',
                'name': '4'
            })
        ], [1, 2, 3, 4])

        # Execution
        retval = self.index_management.list_indexes()

        # Assertions
        self.assertTrue(retval == {
            '1': '1',
            '2': '2',
            '3': '3',
            '4': '4'
        }, 'Index mismatch')
Example #24
File: apps.py Project: ooici-dm/pyon
    def start_app(self, appdef=None, config=None):
        """
        @brief Start an app from an app definition.
        Note: apps can come in one of 2 variants:
        1 processapp: In-line defined process to be started
        2 regular app: Full app definition
        """
        log.debug("AppManager.start_app(appdef=%s) ..." % appdef)

        appdef = DotDict(appdef)
        app_config = DictModifier(CFG)

        if 'config' in appdef:
            # Apply config from app file
            app_file_cfg = DotDict(appdef.config)
            app_config.update(app_file_cfg)

        if config:
            # Nest dict modifier and apply config from rel file
            app_config = DictModifier(app_config, config)

        if 'processapp' in appdef:
            # Case 1: Appdef contains definition of process to start
            name, module, cls = appdef.processapp
            try:
                pid = self.container.spawn_process(name, module, cls,
                                                   app_config)
                appdef._pid = pid
                self.apps.append(appdef)
            except Exception as ex:
                log.exception("App %s start from processapp failed" %
                              appdef.name)
Example #25
File: apps.py Project: blazetopher/pyon
    def start_app(self, appdef=None, config=None):
        """
        @brief Start an app from an app definition.
        Note: apps can come in one of 2 variants:
        1 processapp: In-line defined process to be started
        2 regular app: Full app definition
        """
        log.debug("AppManager.start_app(appdef=%s) ..." % appdef)

        appdef = DotDict(appdef)
        app_config = DictModifier(CFG)

        if 'config' in appdef:
            # Apply config from app file
            app_file_cfg = DotDict(appdef.config)
            app_config.update(app_file_cfg)

        if config:
            # Nest dict modifier and apply config from rel file
            app_config = DictModifier(app_config, config)

        if 'processapp' in appdef:
            # Case 1: Appdef contains definition of process to start
            name, module, cls = appdef.processapp
            try:
                pid = self.container.spawn_process(name, module, cls, app_config)
                appdef._pid = pid
                self.apps.append(appdef)
            except Exception as ex:
                log.exception("App %s start from processapp failed" % appdef.name)
Example #26
    def cache_resources(self, resource_type, specific_ids=None):
        """
        Save all resources of a given type to memory, for in-memory lookup ops

        This is a PREFETCH operation, and EnhancedResourceRegistryClient objects that use the cache functionality
        should NOT be kept across service calls.
        """
        #log.info("Caching resources: %s", resource_type)
        #log.debug("This cache is %s", self)
        time_caching_start = get_ion_ts()

        resource_objs = []
        if specific_ids is None:
            resource_objs, _ = self.RR.find_resources(restype=resource_type, id_only=False)
        else:
            assert type(specific_ids) is list
            if specific_ids:
                resource_objs = self.RR.read_mult(specific_ids)

        lookups = DotDict()
        lookups.by_id =   {}
        lookups.by_name = {}
        self._cached_resources[resource_type] = lookups

        for r in resource_objs:
            self._add_resource_to_cache(resource_type, r)

        time_caching_stop = get_ion_ts()

        total_time = int(time_caching_stop) - int(time_caching_start)
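The companion _add_resource_to_cache called above is not shown; inferring from the by_id/by_name structure just built, it and a cache read presumably look something like this (a guess, not pyon's actual implementation):

    def _add_resource_to_cache(self, resource_type, r):
        # Hypothetical reconstruction based on the lookups structure above.
        lookups = self._cached_resources[resource_type]
        lookups.by_id[r._id] = r
        lookups.by_name.setdefault(r.name, []).append(r)   # names may not be unique

    def get_cached_resource(self, resource_type, resource_id):
        return self._cached_resources[resource_type].by_id.get(resource_id)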
Example #27
class TestContainerExchangeToEms(IonIntegrationTestCase):
    # these tests should auto contact the EMS to do the work
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.ems = ExchangeManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()

        # we want the ex manager to do its thing, but without actual calls to broker
        # just mock out the transport
        self.container.ex_manager._priviledged_transport = Mock(BaseTransport)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_create_xs_talks_to_ems(self):
        self.patch_cfg('pyon.ion.exchange.CFG',
                       container=DotDict(CFG.container,
                                         exchange=DotDict(auto_register=True)))

        xs = self.container.ex_manager.create_xs('house')
        self.addCleanup(xs.delete)

        # should have called EMS and set RR items
        res, _ = self.rr.find_resources(RT.ExchangeSpace, name='house')
        self.assertEquals(res[0].name, 'house')

        # should have tried to call broker as well
        self.assertEquals(
            self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_count, 1)
        self.assertIn(
            'house', self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_args[0][0])

    @patch.dict('pyon.ion.exchange.CFG',
                container=DotDict(CFG.container,
                                  exchange=DotDict(auto_register=False)))
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_create_xs_with_no_flag_only_uses_ex_manager(self):
        self.patch_cfg('pyon.ion.exchange.CFG',
                       container=DotDict(
                           CFG.container,
                           exchange=DotDict(auto_register=False)))

        xs = self.container.ex_manager.create_xs('house')
        self.addCleanup(xs.delete)

        e1, e2 = self.rr.find_resources(RT.ExchangeSpace, name='house')
        self.assertEquals(e1, [])
        self.assertEquals(e2, [])
        self.assertEquals(
            self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_count, 1)
        self.assertIn(
            'house', self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_args[0][0])
Example #28
    def test_is_persisted(self):
        stream = DotDict()
        stream.persisted = True
        self.pubsub_read.return_value = stream

        retval = self.ingestion_management.is_persisted("stream_id")

        self.assertEquals(retval, True)
Example #29
    def preload_ui(self):
        config = DotDict()
        config.op = 'loadui'
        config.loadui = True
        config.attachments = 'res/preload/r2_ioc/attachments'
        config.ui_path = "http://userexperience.oceanobservatories.org/database-exports/Candidates"

        self.container.spawn_process('preloader', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
Example #30
    def lc_preload(self):
        config = DotDict()
        config.op = 'load'
        config.scenario = 'BASE,LC_TEST'
        config.categories = 'ParameterFunctions,ParameterDefs,ParameterDictionary'
        config.path = 'res/preload/r2_ioc'

        self.container.spawn_process('preload', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
Example #31
    def _create_instrument_config_builder(self):
        clients = DotDict()
        clients.resource_registry  = self.RR
        clients.pubsub_management  = self.PSC
        clients.dataset_management = self.DSC
        iconfig_builder = InstrumentAgentConfigurationBuilder(clients)

        return iconfig_builder
Example #32
    def test_bridge_create_schedule(self):
        pdcfg = dict(uri="amqp://hello", topic="pd", exchange="123")
        self.pd_service.CFG = DotDict()
        self.pd_service.CFG['process_dispatcher_bridge'] = pdcfg
        self.pd_service.init()
        self.assertIsInstance(self.pd_service.backend, PDBridgeBackend)

        event_pub = Mock()
        self.pd_service.backend.event_pub = event_pub

        # sneak in and replace dashi connection method
        mock_dashi = Mock()
        mock_dashi.consume.return_value = lambda: None
        self.pd_service.backend._init_dashi = lambda: mock_dashi

        self.pd_service.start()
        self.assertEqual(mock_dashi.handle.call_count, 1)

        proc_def = DotDict()
        proc_def['name'] = "someprocess"
        proc_def['executable'] = {'module':'my_module', 'class':'class'}
        self.mock_rr_read.return_value = proc_def

        pid = self.pd_service.create_process("fake-process-def-id")

        proc_schedule = DotDict()
        proc_schedule['target'] = DotDict()
        proc_schedule.target['constraints'] = {"hats" : 4}

        configuration = {"some": "value"}

        pid2 = self.pd_service.schedule_process("fake-process-def-id",
            proc_schedule, configuration, pid)

        self.assertTrue(pid.startswith(proc_def.name) and pid != proc_def.name)
        self.assertEqual(pid, pid2)
        self.assertTrue(pid.startswith(proc_def.name) and pid != proc_def.name)
        self.assertEqual(mock_dashi.call.call_count, 1)
        call_args, call_kwargs = mock_dashi.call.call_args
        self.assertEqual(set(call_kwargs),
            set(['upid', 'spec', 'subscribers', 'constraints']))
        self.assertEqual(call_kwargs['constraints'],
            proc_schedule.target['constraints'])
        self.assertEqual(call_kwargs['subscribers'],
            self.pd_service.backend.pd_process_subscribers)
        self.assertEqual(call_args, ("pd", "dispatch_process"))
        self.assertEqual(event_pub.publish_event.call_count, 0)

        # trigger some fake async state updates from dashi. first
        # should not trigger an event

        self.pd_service.backend._process_state(dict(upid=pid,
            state="400-PENDING"))
        self.assertEqual(event_pub.publish_event.call_count, 0)

        self.pd_service.backend._process_state(dict(upid=pid,
            state="500-RUNNING"))
        self.assertEqual(event_pub.publish_event.call_count, 1)
Example #33
    def test_query_request_association(self):
        query = DotDict()
        query.association = 'resource_id'

        self.discovery.query_association = Mock()
        self.discovery.query_association.return_value = 'test'

        retval = self.discovery.query_request(query)
        self.assertTrue(retval == 'test')
Example #34
    def test_query_request_collection(self):
        query = DotDict()
        query.collection = 'test'

        self.discovery.query_collection = Mock()
        self.discovery.query_collection.return_value = 'test'

        retval = self.discovery.query_request(query)
        self.assertTrue(retval == 'test')
Example #35
    def test_get_dataset_info(self):
        coverage = DotDict()
        coverage.info = 1

        self.dataset_management._get_coverage = Mock()
        self.dataset_management._get_coverage.return_value = coverage

        retval = self.dataset_management.get_dataset_info('dataset_id')
        self.assertEquals(retval,1)
Example #36
    def test_query_request_collection(self):
        query = DotDict()
        query.collection = 'test'

        self.discovery.query_collection = Mock()
        self.discovery.query_collection.return_value = 'test'

        retval = self.discovery.query_request(query)
        self.assertTrue(retval == 'test')
Example #37
    def test_query_request_association(self):
        query = DotDict()
        query.association = 'resource_id'

        self.discovery.query_association = Mock()
        self.discovery.query_association.return_value = 'test'

        retval = self.discovery.query_request(query)
        self.assertTrue(retval == 'test')
Example #38
    def test_dotdict_builtin_error(self):
        # Ensures that you can not override the builtin methods
        base = DotDict()
        with self.assertRaises(AttributeError):
            base.pop = "shouldnt work"
        with self.assertRaises(AttributeError):
            base.__getitem__ = "really shouldnt work"

        with self.assertRaises(AttributeError):
            base.another.chained.pop = "again should not work"
Example #39
    def test_dotdict_builtin_error(self):
        # Ensures that you can not override the builtin methods
        base = DotDict()
        with self.assertRaises(AttributeError):
            base.pop = 'shouldnt work'
        with self.assertRaises(AttributeError):
            base.__getitem__ = 'really shouldnt work'

        with self.assertRaises(AttributeError):
            base.another.chained.pop = 'again should not work'
Example #40
    def test_query_request_term_search(self):
        query = DotDict()
        query.index = 'index_id'
        query.field = 'field'
        query.value = 'value'
        self.discovery.query_term = Mock()
        self.discovery.query_term.return_value = 'test'

        retval = self.discovery.query_request(query)
        self.assertTrue(retval == 'test', '%s' % retval)
Example #41
    def test_query_request_term_search(self):
        query = DotDict()
        query.index = 'index_id'
        query.field = 'field'
        query.value = 'value'
        self.discovery.query_term = Mock()
        self.discovery.query_term.return_value = 'test'

        retval = self.discovery.query_request(query)
        self.assertTrue(retval == 'test', '%s' % retval)
Example #42
    def test_dotdict_chaining(self):
        base = DotDict({'test': None})
        base.chained.example.provides.utility = True
        self.assertTrue(base['chained']['example']['provides']['utility'])
        base.setting = True
        self.assertTrue(base.setting)
        self.assertTrue(base['setting'])

        base.map = {'key': 'value'}
        self.assertIsInstance(base.map, DotDict, '%s' % type(base.map))
        self.assertTrue(base.map.key == 'value')
Example #43
    def test_query_request_range(self):
        query = DotDict()
        query['range'] = {'from':0, 'to':90}
        query.index = 'index_id'
        query.field = 'field'

        self.discovery.query_range = Mock()
        self.discovery.query_range.return_value = 'test'

        retval = self.discovery.query_request(query)
        self.assertTrue(retval == 'test')
Example #44
    def test_dotdict_chaining(self):
        base = DotDict({'test':None})
        base.chained.example.provides.utility = True
        self.assertTrue(base['chained']['example']['provides']['utility'])
        base.setting = True
        self.assertTrue(base.setting)
        self.assertTrue(base['setting'])

        base.map = {'key':'value'}
        self.assertIsInstance(base.map,DotDict, '%s' % type(base.map))
        self.assertTrue(base.map.key=='value')
Example #45
    def test_query_request_range(self):
        query = DotDict()
        query['range'] = {'from':0, 'to':90}
        query.index = 'index_id'
        query.field = 'field'

        self.discovery.query_range = Mock()
        self.discovery.query_range.return_value = 'test'

        retval = self.discovery.query_request(query)
        self.assertTrue(retval == 'test')
Example #46
    def test_delete_ingestion(self):
        testval = DotDict()
        testval.o = "sub_id"
        self.rr_find_assocs.return_value = [testval]

        self.ingestion_management.delete_ingestion_configuration("test")

        self.rr_del_assoc.assert_called_once_with(testval)
        self.pubsub_del_sub.assert_called_once_with(testval.o)

        self.rr_delete.assert_called_once_with("test")
Example #47
    def test_delete_ingestion(self):
        testval = DotDict()
        testval.o = 'sub_id'
        self.rr_find_assocs.return_value = [testval]

        self.ingestion_management.delete_ingestion_configuration('test')

        self.rr_del_assoc.assert_called_once_with(testval)
        self.pubsub_del_sub.assert_called_once_with(testval.o)

        self.rr_delete.assert_called_once_with('test')
Example #48
    def setUp(self):
        self.pd_service = ProcessDispatcherService()
        self.pd_service.container = DotDict()
        self.pd_service.container['spawn_process'] = Mock()
        self.pd_service.container['id'] = 'mock_container_id'
        self.pd_service.container['proc_manager'] = DotDict()
        self.pd_service.container['resource_registry'] = Mock()
        self.pd_service.container.proc_manager['terminate_process'] = Mock()
        self.pd_service.container.proc_manager['procs'] = {}

        pdcfg = dict(dashi_uri="amqp://hello",
                     dashi_exchange="123",
                     static_resources=True,
                     backend="native")
        self.pd_service.CFG = DotDict()
        self.pd_service.CFG['processdispatcher'] = pdcfg

        self.mock_dashi = Mock()

        with patch.multiple('ion.services.cei.process_dispatcher_service',
                            get_dashi=DEFAULT,
                            ProcessDispatcherCore=DEFAULT,
                            get_processdispatcher_store=DEFAULT,
                            EngineRegistry=DEFAULT,
                            PDMatchmaker=DEFAULT,
                            PDDoctor=DEFAULT) as mocks:
            mocks['get_dashi'].return_value = self.mock_dashi
            mocks['get_processdispatcher_store'].return_value = self.mock_store = Mock()
            mocks['ProcessDispatcherCore'].return_value = self.mock_core = Mock()
            mocks['PDMatchmaker'].return_value = self.mock_matchmaker = Mock()
            mocks['PDDoctor'].return_value = self.mock_doctor = Mock()
            mocks['EngineRegistry'].return_value = self.mock_engineregistry = Mock()

            self.pd_service.init()

        # replace the core and matchmaker with mocks
        self.pd_service.backend.beat_subscriber = self.mock_beat_subscriber = Mock()
        self.assertIsInstance(self.pd_service.backend, PDNativeBackend)
        self.pd_service.backend.rr = self.mock_rr = Mock()

        self.event_pub = Mock()
        self.pd_service.backend.event_pub = self.event_pub

        self.pd_service.start()
        self.assertEqual(self.mock_dashi.handle.call_count, 1)
        self.mock_matchmaker.start_election.assert_called_once_with()
        self.mock_beat_subscriber.start.assert_called_once_with()
Example #49
    def test_dotdict_chaining(self):
        base = DotDict({"test": None})
        base.chained.example.provides.utility = True
        self.assertTrue(base["chained"]["example"]["provides"]["utility"])
        base.setting = True
        self.assertTrue(base.setting)
        self.assertTrue(base["setting"])

        base.map = {"key": "value"}
        self.assertIsInstance(base.map, DotDict, "%s" % type(base.map))
        self.assertTrue(base.map.key == "value")
Example #50
    def _create_platform_config_builder(self):
        clients = DotDict()
        clients.resource_registry  = self.RR
        clients.pubsub_management  = self.PSC
        clients.dataset_management = self.DSC
        pconfig_builder = PlatformAgentConfigurationBuilder(clients)

        # can't do anything without an agent instance obj
        log.debug("Testing that preparing a launcher without agent instance raises an error")
        self.assertRaises(AssertionError, pconfig_builder.prepare, will_launch=False)

        return pconfig_builder
Example #51
    def on_initial_bootstrap(self, process, config, **kwargs):
        if config.get_safe('system.elasticsearch') and config.get_safe('bootstrap.use_es'):
            #---------------------------------------------
            # Spawn the index bootstrap
            #---------------------------------------------
            config = DotDict(config)
            config.op                   = 'clean_bootstrap'

            process.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap',config)
            #---------------------------------------------
        else:
            log.info("Not creating the ES indexes.")
Example #52
    def recv_packet(self, msg, stream_route, stream_id):
        '''
        The callback method. For situations like bad or no data, publish an alert event.

        @param msg granule
        @param stream_route StreamRoute object
        @param stream_id str
        '''

        log.debug("DemoStreamAlertTransform received a packet!: %s" % msg)
        log.debug("type of packet received by transform: %s", type(msg))

        #-------------------------------------------------------------------------------------
        # Set up the config to use to pass info to the transform algorithm
        #-------------------------------------------------------------------------------------
        config = DotDict()
        config.valid_values = self.valid_values
        config.variable_name = self.instrument_variable_name
        config.time_field_name = self.time_field_name

        #-------------------------------------------------------------------------------------
        # Store the granule received
        #-------------------------------------------------------------------------------------
        self.granules.put(msg)

        #-------------------------------------------------------------------------------------
        # Check for good and bad values in the granule
        #-------------------------------------------------------------------------------------
        bad_values, bad_value_times, self.origin = AlertTransformAlgorithm.execute(
            msg, config=config)

        log.debug(
            "DemoStreamAlertTransform got the origin of the event as: %s" %
            self.origin)

        #-------------------------------------------------------------------------------------
        # If there are any bad values, publish an alert event for the granule
        #-------------------------------------------------------------------------------------
        if bad_values:
            # Publish the event
            self.publisher.publish_event(
                event_type='DeviceStatusEvent',
                origin=self.origin,
                origin_type='PlatformDevice',
                sub_type=self.instrument_variable_name,
                values=bad_values,
                time_stamps=bad_value_times,
                valid_values=self.valid_values,
                state=DeviceStatusType.OUT_OF_RANGE,
                description="Event to deliver the status of instrument.")

            log.debug("DemoStreamAlertTransform published a BAD DATA event")
Example #53
    def setUp(self):
        self.mock_real_client = DotDict()
        self.mock_real_client.read_process_definition = Mock()
        self.mock_real_client.create_process = Mock()
        self.mock_real_client.schedule_process = Mock()
        self.mock_real_client.read_process = Mock()
        self.mock_eventpub = DotDict()
        self.mock_eventpub.publish_event = Mock()
        self.mock_container = Mock()

        self.client = ProcessDispatcherSimpleAPIClient('fake',
            real_client=self.mock_real_client, container=self.mock_container)
        self.client.event_pub = self.mock_eventpub
Example #54
    def preload_sptest(self):
        config = DotDict()
        config.op = 'load'
        config.loadui = True
        config.ui_path = "http://userexperience.oceanobservatories.org/database-exports/Candidates"
        config.attachments = "res/preload/r2_ioc/attachments"
        config.scenario = 'BETA,SP_TEST'
        config.path = 'master'
        config.categories = 'ParameterFunctions,ParameterDefs,ParameterDictionary'
        self.container.spawn_process('preloader', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
Example #55
    def setUp(self):

        self.cfg = DotDict()
        self.cfg.eeagent = DotDict()
        self.cfg.eeagent.heartbeat = 1
        self.factory = Mock()

        self.process_id = 'fake'
        self.process = DotDict()
        self.process._process = DotDict()
        self.process._process.heartbeat = Mock(return_value=self.heartbeater_not_ok)
        self.process._process.listeners = []

        self.heartbeater = HeartBeater(self.cfg, self.factory, self.process_id, self.process)
Example #56
    def load_data_process(self, stream_id=""):

        dpms_client = DataProcessManagementServiceClient()

        dataprocess_details_list = dpms_client.read_data_process_for_stream(
            stream_id)

        dataprocess_ids = []
        #this returns a list of data process info dicts
        for dataprocess_details in dataprocess_details_list:

            dataprocess_details = DotDict(dataprocess_details or {})
            dataprocess_id = dataprocess_details.dataprocess_id

            #set metrics attributes
            dataprocess_details.granule_counter = 0

            self._dataprocesses[dataprocess_id] = dataprocess_details
            log.debug('load_data_process  dataprocess_id: %s', dataprocess_id)
            log.debug('load_data_process  dataprocess_details: %s',
                      dataprocess_details)

            # validate details
            # if no output stream info is available, log a warning; the TF may still publish an event, so proceed
            if not dataprocess_details.out_stream_def or not dataprocess_details.output_param:
                log.warning(
                    'No output stream details provided for data process %s, will not publish a granule',
                    dataprocess_id)

            #add the stream id to the map
            if 'in_stream_id' in dataprocess_details:
                if dataprocess_details['in_stream_id'] in self._streamid_map:
                    (self._streamid_map[dataprocess_details['in_stream_id']]
                     ).append(dataprocess_id)
                else:
                    self._streamid_map[dataprocess_details['in_stream_id']] = [
                        dataprocess_id
                    ]
            #todo: add transform worker id
            self.event_publisher.publish_event(
                origin=dataprocess_id,
                origin_type='DataProcess',
                status=DataProcessStatusType.NORMAL,
                description='data process loaded into transform worker')

            #create a publisher for output stream
            self.create_publisher(dataprocess_id, dataprocess_details)
            dataprocess_ids.append(dataprocess_id)

        return dataprocess_ids
Example #57
    def recv_packet(self, msg, stream_route, stream_id):
        '''
        The callback method. For situations like bad or no data, publish an alert event.

        @param msg granule
        @param stream_route StreamRoute object
        @param stream_id str
        '''

        log.debug("DemoStreamAlertTransform received a packet!: %s" % msg)
        log.debug("type of packet received by transform: %s", type(msg))

        #-------------------------------------------------------------------------------------
        # Set up the config to use to pass info to the transform algorithm
        #-------------------------------------------------------------------------------------
        config = DotDict()
        config.valid_values = self.valid_values
        config.variable_name = self.instrument_variable_name
        config.time_field_name = self.time_field_name

        #-------------------------------------------------------------------------------------
        # Store the granule received
        #-------------------------------------------------------------------------------------
        self.granules.put(msg)

        #-------------------------------------------------------------------------------------
        # Check for good and bad values in the granule
        #-------------------------------------------------------------------------------------
        bad_values, bad_value_times, self.origin = AlertTransformAlgorithm.execute(msg, config = config)

        log.debug("DemoStreamAlertTransform got the origin of the event as: %s" % self.origin)

        #-------------------------------------------------------------------------------------
        # If there are any bad values, publish an alert event for the granule
        #-------------------------------------------------------------------------------------
        if bad_values:
            # Publish the event
            self.publisher.publish_event(
                event_type = 'DeviceStatusEvent',
                origin = self.origin,
                origin_type='PlatformDevice',
                sub_type = self.instrument_variable_name,
                values = bad_values,
                time_stamps = bad_value_times,
                valid_values = self.valid_values,
                state = DeviceStatusType.OUT_OF_RANGE,
                description = "Event to deliver the status of instrument."
            )

            log.debug("DemoStreamAlertTransform published a BAD DATA event")
Example #58
    def test_schedule_haagent_name(self):
        haa_proc_def = DotDict()
        haa_proc_def['name'] = "haagent"
        haa_proc_def['executable'] = {'module': 'my_module', 'class': 'class'}

        payload_proc_def = DotDict()
        payload_proc_def['name'] = "payload_process"
        payload_proc_def['executable'] = {
            'module': 'my_module',
            'class': 'class'
        }

        proc_defs = {
            "haa_proc_def_id": haa_proc_def,
            "payload_proc_def_id": payload_proc_def
        }

        read_definition_mock = Mock()
        read_definition_mock.side_effect = proc_defs.get
        self.pd_service.backend.read_definition = read_definition_mock

        # not used for anything in local mode
        proc_schedule = DotDict()

        configuration = {
            "highavailability": {
                "process_definition_id": "payload_proc_def_id"
            }
        }
        self.pd_service.schedule_process("haa_proc_def_id", proc_schedule,
                                         configuration)

        self.assertEqual(self.mock_core.schedule_process.call_count, 1)
        name = self.mock_core.schedule_process.call_args[1]['name']
        self.assertTrue(name.startswith("payload_process-ha"))

        # now try with scheduling by process definition name instead of ID
        self.mock_core.schedule_process.reset_mock()
        configuration = {
            "highavailability": {
                "process_definition_name": "payload_process"
            }
        }
        self.pd_service.schedule_process("haa_proc_def_id", proc_schedule,
                                         configuration)

        self.assertEqual(self.mock_core.schedule_process.call_count, 1)
        name = self.mock_core.schedule_process.call_args[1]['name']
        self.assertTrue(name.startswith("payload_process-ha"))
Example #59
    def test_index_bootstrap(self, mock_es, ims_cli):
        #---------------------------------------------
        # Mocks
        #---------------------------------------------
        mock_es().index_create.return_value = {'ok': True, 'status': 200}
        mock_es().raw.return_value = {'ok': True, 'status': 200}
        mock_es().river_couchdb_create.return_value = {
            'ok': True,
            'status': 200
        }

        db = DotDict()
        db.datastore_name = 'test'
        db.server.test.create = Mock()

        container = DotDict()
        container.datastore_manager.get_datastore = Mock()
        container.datastore_manager.get_datastore.return_value = db

        config = CFG
        config.system.force_clean = False
        config.system.elasticsearch = True
        config.server.elasticsearch.host = ''
        config.server.elasticsearch.port = ''
        config.op = 'index_bootstrap'

        #---------------------------------------------
        # Execution
        #---------------------------------------------
        ibs = IndexBootStrap()
        ibs.CFG = config
        ibs.container = container
        ibs.on_start()

        index_count = len(STD_INDEXES) + len(EDGE_INDEXES) + 1  # for _river
        self.assertTrue(
            mock_es().index_create.call_count == index_count,
            '(%s != %s) Improper number of indices created' %
            (mock_es().index_create.call_count, index_count))

        river_count = len(STD_INDEXES) + len(EDGE_INDEXES)
        self.assertTrue(
            mock_es().river_couchdb_create.call_count == river_count,
            'Improper number of rivers created')

        total_count = len(STD_INDEXES) + len(COUCHDB_INDEXES) + len(
            EDGE_INDEXES)
        self.assertTrue(ims_cli().create_index.call_count == total_count,
                        'Improper number of index resources created')
Example #60
    def test_cached_predicate_search(self):
        d = "d_id"
        m = "m_id"
        x = "x_id"

        good_assn = DotDict(s=d,
                            st=RT.InstrumentDevice,
                            p=PRED.hasModel,
                            o=m,
                            ot=RT.InstrumentModel)
        bad_assn = DotDict(s=d,
                           st=RT.PlatformDevice,
                           p=PRED.hasModel,
                           o=m,
                           ot=RT.PlatformModel)

        self.rr.find_associations.return_value = [good_assn, bad_assn]

        self.RR2.cache_predicate(PRED.hasModel)

        self.assertTrue(self.RR2.has_cached_predicate(PRED.hasModel))
        self.rr.find_associations.assert_called_once_with(
            predicate=PRED.hasModel, id_only=False)

        # object searches that should return 0, 0, 1 results
        results = self.RR2.find_objects(x, PRED.hasModel, RT.InstrumentModel,
                                        True)
        self.assertEqual([], results)
        results = self.RR2.find_instrument_model_ids_of_instrument_device_using_has_model(
            x)
        self.assertEqual([], results)
        results = self.RR2.find_instrument_model_ids_of_instrument_device_using_has_model(
            d)
        self.assertEqual([m], results)

        self.assertEqual(0, self.rr.find_objects.call_count)

        # subject searches that should return 0, 0, 1 results
        results = self.RR2.find_subjects(RT.InstrumentDevice, PRED.hasModel, x,
                                         True)
        self.assertEqual([], results)
        results = self.RR2.find_instrument_device_ids_by_instrument_model_using_has_model(
            x)
        self.assertEqual([], results)
        results = self.RR2.find_instrument_device_ids_by_instrument_model_using_has_model(
            m)
        self.assertEqual([d], results)

        self.assertEqual(0, self.rr.find_subjects.call_count)
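Mirroring the resource cache exercised earlier, the predicate cache presumably stores the association list returned by find_associations and answers the find_objects/find_subjects variants from memory. A hedged sketch of such a lookup, using only the association fields (s, st, p, o, ot) visible in this test:

    def find_objects_from_cached_predicate(self, subject_id, predicate, object_type):
        # Hypothetical in-memory scan over cached associations.
        return [a.o for a in self._cached_predicates[predicate]
                if a.s == subject_id and a.ot == object_type]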