Example #1
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)
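The setUp above relies on two pyon configuration helpers: CFG.get_safe, a dotted-path lookup that falls back to a default, and DotDict, a dict with attribute-style access that (as config.bootstrap.use_es = True in the later examples shows) auto-creates nested sections. A minimal sketch of that behaviour, assuming only what these examples exercise; it is not the real pyon implementation:

class DotDict(dict):
    # Illustrative stand-in for pyon's DotDict; the real class does more.
    def __getattr__(self, key):
        if key.startswith('__'):
            raise AttributeError(key)
        # Auto-create nested sections so config.bootstrap.use_es = True works.
        if key not in self:
            self[key] = DotDict()
        return self[key]

    def __setattr__(self, key, value):
        self[key] = value

    def get_safe(self, dotted_path, default=None):
        # Walk 'a.b.c' through nested dicts, returning default on any missing segment.
        node = self
        for part in dotted_path.split('.'):
            if isinstance(node, dict) and part in node:
                node = node[part]
            else:
                return default
        return node

config = DotDict()
config.bootstrap.use_es = True
assert config.get_safe('bootstrap.use_es') is True
assert config.get_safe('server.elasticsearch.port', '9200') == '9200'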
Example #2
    def setUp(self):
        super(CatalogManagementIntTest,self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.cms_cli = CatalogManagementServiceClient()
        self.rr_cli  = ResourceRegistryServiceClient()
Example #3
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)
Example #4
    def setUp(self):
        super(CatalogManagementIntTest,self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.cms_cli = CatalogManagementServiceClient()
        self.rr_cli  = ResourceRegistryServiceClient()
Example #5
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()
        config = DotDict()
        config.bootstrap.use_es = True

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml', config)

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()
Example #6
class DiscoveryIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()
        config = DotDict()
        config.bootstrap.use_es = True

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        self.discovery               = DiscoveryServiceClient()
        self.catalog                 = CatalogManagementServiceClient()
        self.ims                     = IndexManagementServiceClient()
        self.rr                      = ResourceRegistryServiceClient()
        self.dataset_management      = DatasetManagementServiceClient()
        self.pubsub_management       = PubsubManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

    @staticmethod
    def es_cleanup():
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(
            host=es_host,
            port=es_port,
            timeout=10
        )
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())

        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete,index)
            IndexManagementService._es_call(es.index_delete,index)


    def poll(self, tries, callback, *args, **kwargs):
        '''
        Polling wrapper for queries.
        Elasticsearch may not index and cache changes right away, so we may need
        a few tries and a short delay before the results show up.
        '''
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None
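    # Note (illustrative, not part of the original test class): poll() is how these
    # tests cope with Elasticsearch's near-real-time indexing -- instead of asserting
    # immediately after rr.create(), they retry the query a few times with a short
    # sleep. Typical call shape, with a made-up search string for illustration:
    #
    #   results = self.poll(5, self.discovery.parse,
    #                       "search 'name' is 'sonobuoy*' from 'resources_index'")
    #   self.assertIsNotNone(results, 'Results not found')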


    def test_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)

    def test_iterative_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.iterative_traverse(dp_id)
        results.sort()
        correct = [transform_id]
        self.assertTrue(results == correct)

        results = self.discovery.iterative_traverse(dp_id, 1)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
    
    @skipIf(not use_es, 'No ElasticSearch')
    def test_view_crud(self):
        view_id = self.discovery.create_view('big_view',fields=['name'])
        catalog_id = self.discovery.list_catalogs(view_id)[0]
        index_ids = self.catalog.list_indexes(catalog_id)
        self.assertTrue(len(index_ids))

        view = self.discovery.read_view(view_id)
        self.assertIsInstance(view,View)
        self.assertTrue(view.name == 'big_view')

        view.name = 'not_so_big_view'

        self.discovery.update_view(view)

        view = self.discovery.read_view(view_id)
        self.assertTrue(view.name == 'not_so_big_view')

        self.discovery.delete_view(view_id)
        with self.assertRaises(NotFound):
            self.discovery.read_view(view_id)

    def test_view_best_match(self):
        #---------------------------------------------------------------
        # Matches the best catalog available OR creates a new one
        #---------------------------------------------------------------
        catalog_id = self.catalog.create_catalog('dev', keywords=['name','model'])
        view_id    = self.discovery.create_view('exact_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('another_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids != [catalog_id])

    @skipIf(not use_es, 'No ElasticSearch')
    def test_basic_searching(self):

        #- - - - - - - - - - - - - - - - - 
        # set up the fake resources
        #- - - - - - - - - - - - - - - - - 

        instrument_pool = [
            InstrumentDevice(name='sonobuoy1', firmware_version='1'),
            InstrumentDevice(name='sonobuoy2', firmware_version='2'),
            InstrumentDevice(name='sonobuoy3', firmware_version='3')
        ]
        for instrument in instrument_pool:
            self.rr.create(instrument)

        view_id = self.discovery.create_view('devices', fields=['firmware_version'])

        search_string = "search 'firmware_version' is '2' from '%s'"%view_id
        results = self.poll(5, self.discovery.parse,search_string)
        result  = results[0]['_source']
        self.assertIsInstance(result, InstrumentDevice)
        self.assertTrue(result.name == 'sonobuoy2')
        self.assertTrue(result.firmware_version == '2')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_associative_searching(self):

        dp_id,_ = self.rr.create(DataProduct('test_foo'))
        ds_id,_ = self.rr.create(Dataset('test_bar', registered=True))
        self.rr.create_association(subject=dp_id, object=ds_id, predicate='hasDataset')

        search_string = "search 'type_' is 'Dataset' from 'resources_index' and belongs to '%s'" % dp_id

        results = self.poll(5, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(ds_id in results)

    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)


    @skipIf(not use_es, 'No ElasticSearch')
    def test_ranged_value_searching(self):
        discovery = self.discovery
        rr        = self.rr
        
        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)
        
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=90))

        search_string = "search 'cash_balance' values from 80 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_collections_searching(self):

        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id    = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" %(view_id, collection_id)

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        self.discovery.create_view('devs',fields=['name','serial_number'])

        search_string = "search 'serial_number' is 'abc*' from 'devs'"
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name_index(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"
        
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id , _ = self.rr.create(bank_acc)

        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"

        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    #@skipIf(not use_es, 'No ElasticSearch')
    @skip('Skip until time to refactor, data_format is removed from DataProduct resource')
    def test_data_product_search(self):

        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.CDM_data_type = 'basic'
        dp_id, _ = self.rr.create(dp)

        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'CDM_data_type' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_events_search(self):
        # Create a resource to force a new event

        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id

        results = self.poll(9, self.discovery.parse,search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin

        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_distance_search(self):

        pd = PlatformDevice(name='test_dev')

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'index_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
   
    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_bbox_search(self):

        pd = PlatformDevice(name='test_dev')
        pd.index_location.lat = 5
        pd.index_location.lon = 5

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'index_location' geo box top-left lat 10 lon 0 bottom-right lat 0 lon 10 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_time_search(self):
        today     = date.today()
        yesterday = today - timedelta(days=1)
        tomorrow  = today + timedelta(days=1)

        data_product = DataProduct()
        dp_id, _ = self.rr.create(data_product)
        
        search_string = "search 'ts_created' time from '%s' to '%s' from 'data_products_index'" % (yesterday, tomorrow)

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results,'Results not found')

        self.assertTrue(results[0]['_id'] == dp_id)
        
        search_string = "search 'ts_created' time from '%s' from 'data_products_index'" % yesterday

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results,'Results not found')

        self.assertTrue(results[0]['_id'] == dp_id)

        
    @skipIf(not use_es, 'No ElasticSearch')
    def test_user_search(self):
        user = UserInfo()
        user.name = 'test'
        user.contact.phones.append('5551212')

        user_id, _ = self.rr.create(user)

        search_string = 'search "name" is "test" from "users_index"'

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

        search_string = 'search "contact.phones" is "5551212" from "users_index"'
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_subobject_search(self):
        contact = ContactInformation()
        contact.email = '*****@*****.**'
        contact.individual_name_family = 'Tester'
        contact.individual_names_given = 'Intern'

        dp = DataProduct(name='example')
        dp.contacts.append(contact)

        dp_id,_ = self.rr.create(dp)

        #--------------------------------------------------------------------------------
        # Example using the full field name
        #--------------------------------------------------------------------------------
        search_string = 'search "contacts.email" is "*****@*****.**" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

        #--------------------------------------------------------------------------------
        # Example using a sub-object's field name (ambiguous searching)
        #--------------------------------------------------------------------------------
        search_string = 'search "individual_names_given" is "Intern" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_descriptive_phrase_search(self):
        dp = DataProduct(name='example', description='This is simply a description for this data product')
        dp_id, _ = self.rr.create(dp)

        search_string = 'search "description" like "description for" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_ownership_searching(self):
        # Create two data products so that there is competition to the search, one is parsed 
        # (with conductivity as a parameter) and the other is raw
        dp = DataProduct(name='example dataproduct')
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        tdom, sdom = time_series_domain()
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()
        dp_id = self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_raw_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd raw', parameter_dictionary_id=pdict_id)
        dp = DataProduct(name='WRONG')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()
        self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        parameter_search = 'search "name" is "conductivity" from "resources_index"'
        results = self.poll(9, self.discovery.parse, parameter_search)
        param_id = results[0]['_id']

        data_product_search = 'search "name" is "*" from "data_products_index" and has "%s"' % param_id
        results = self.poll(9, self.discovery.parse, data_product_search)
        print results
        self.assertEquals(results[0], dp_id)
Example #7
class DiscoveryIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)

    @staticmethod
    def es_cleanup():
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(
            host=es_host,
            port=es_port,
            timeout=10
        )
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())

        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete,index)
            IndexManagementService._es_call(es.index_delete,index)


    def poll(self, tries, callback, *args, **kwargs):
        '''
        Polling wrapper for queries.
        Elasticsearch may not index and cache changes right away, so we may need
        a few tries and a short delay before the results show up.
        '''
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None


    def test_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)

    def test_iterative_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.iterative_traverse(dp_id)
        results.sort()
        correct = [transform_id]
        self.assertTrue(results == correct)

        results = self.discovery.iterative_traverse(dp_id, 1)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
    
    @skipIf(not use_es, 'No ElasticSearch')
    def test_view_crud(self):
        view_id = self.discovery.create_view('big_view',fields=['name'])
        catalog_id = self.discovery.list_catalogs(view_id)[0]
        index_ids = self.catalog.list_indexes(catalog_id)
        self.assertTrue(len(index_ids))

        view = self.discovery.read_view(view_id)
        self.assertIsInstance(view,View)
        self.assertTrue(view.name == 'big_view')

        view.name = 'not_so_big_view'

        self.discovery.update_view(view)

        view = self.discovery.read_view(view_id)
        self.assertTrue(view.name == 'not_so_big_view')

        self.discovery.delete_view(view_id)
        with self.assertRaises(NotFound):
            self.discovery.read_view(view_id)

    def test_view_best_match(self):
        #---------------------------------------------------------------
        # Matches the best catalog available OR creates a new one
        #---------------------------------------------------------------
        catalog_id = self.catalog.create_catalog('dev', keywords=['name','model'])
        view_id    = self.discovery.create_view('exact_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('another_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids != [catalog_id])

    @skipIf(not use_es, 'No ElasticSearch')
    def test_basic_searching(self):

        #- - - - - - - - - - - - - - - - - 
        # set up the fake resources
        #- - - - - - - - - - - - - - - - - 

        instrument_pool = [
            InstrumentDevice(name='sonobuoy1', hardware_version='1'),
            InstrumentDevice(name='sonobuoy2', hardware_version='2'),
            InstrumentDevice(name='sonobuoy3', hardware_version='3')
        ]
        for instrument in instrument_pool:
            self.rr.create(instrument)

        view_id = self.discovery.create_view('devices', fields=['hardware_version'])

        search_string = "search 'hardware_version' is '2' from '%s'"%view_id
        results = self.poll(5, self.discovery.parse,search_string)
        result  = results[0]['_source']
        self.assertIsInstance(result, InstrumentDevice)
        self.assertTrue(result.name == 'sonobuoy2')
        self.assertTrue(result.hardware_version == '2')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_associative_searching(self):

        view_id = self.discovery.create_view('devices', fields=['model'])
        site_id,_ = self.rr.create(Site('my_site'))
        pd_id, _  = self.rr.create(PlatformDevice('my_device', model='abc123'))
        self.rr.create_association(subject=site_id, object=pd_id, predicate=PRED.hasDevice)

        search_string = "search 'model' is 'abc*' from '%s' and belongs to '%s'"%(view_id, site_id)

        results = self.poll(5, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(pd_id in results)

    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)


    @skipIf(not use_es, 'No ElasticSearch')
    def test_ranged_value_searching(self):
        discovery = self.discovery
        rr        = self.rr
        
        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_collections_searching(self):

        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id    = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" %(view_id, collection_id)

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        self.discovery.create_view('devs',fields=['name','serial_number'])

        search_string = "search 'serial_number' is 'abc*' from 'devs'"
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name_index(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"
        
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id , _ = self.rr.create(bank_acc)

        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"

        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_data_product_search(self):

        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.data_product_level = 'basic'
        dp_id, _ = self.rr.create(dp)

        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'data_product_level' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_events_search(self):
        # Create a resource to force a new event

        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id

        results = self.poll(9, self.discovery.parse,search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin

        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_distance_search(self):

        pd = PlatformDevice(name='test_dev')

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
   
    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_bbox_search(self):

        pd = PlatformDevice(name='test_dev')
        pd.nominal_location.lat = 5
        pd.nominal_location.lon = 5

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo box top-left lat 10 lon 0 bottom-right lat 0 lon 10 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
        
    @skipIf(not use_es, 'No ElasticSearch')
    def test_user_search(self):
        user = UserInfo()
        user.name = 'test'
        user.contact.phone = '5551212'

        user_id, _ = self.rr.create(user)

        search_string = 'search "name" is "test" from "users_index"'

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

        search_string = 'search "contact.phone" is "5551212" from "users_index"'
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')
Example #8
class DiscoveryIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)

    @staticmethod
    def es_cleanup():
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(
            host=es_host,
            port=es_port,
            timeout=10
        )
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())

        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete,index)
            IndexManagementService._es_call(es.index_delete,index)


    def poll(self, tries, callback, *args, **kwargs):
        '''
        Polling wrapper for queries.
        Elasticsearch may not index and cache changes right away, so we may need
        a few tries and a short delay before the results show up.
        '''
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None


    def test_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)

    def test_iterative_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.iterative_traverse(dp_id)
        results.sort()
        correct = [transform_id]
        self.assertTrue(results == correct)

        results = self.discovery.iterative_traverse(dp_id, 1)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
    
    @skipIf(not use_es, 'No ElasticSearch')
    def test_view_crud(self):
        view_id = self.discovery.create_view('big_view',fields=['name'])
        catalog_id = self.discovery.list_catalogs(view_id)[0]
        index_ids = self.catalog.list_indexes(catalog_id)
        self.assertTrue(len(index_ids))

        view = self.discovery.read_view(view_id)
        self.assertIsInstance(view,View)
        self.assertTrue(view.name == 'big_view')

        view.name = 'not_so_big_view'

        self.discovery.update_view(view)

        view = self.discovery.read_view(view_id)
        self.assertTrue(view.name == 'not_so_big_view')

        self.discovery.delete_view(view_id)
        with self.assertRaises(NotFound):
            self.discovery.read_view(view_id)

    def test_view_best_match(self):
        #---------------------------------------------------------------
        # Matches the best catalog available OR creates a new one
        #---------------------------------------------------------------
        catalog_id = self.catalog.create_catalog('dev', keywords=['name','model'])
        view_id    = self.discovery.create_view('exact_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('another_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids != [catalog_id])

    @skipIf(not use_es, 'No ElasticSearch')
    def test_basic_searching(self):

        #- - - - - - - - - - - - - - - - - 
        # set up the fake resources
        #- - - - - - - - - - - - - - - - - 

        instrument_pool = [
            InstrumentDevice(name='sonobuoy1', hardware_version='1'),
            InstrumentDevice(name='sonobuoy2', hardware_version='2'),
            InstrumentDevice(name='sonobuoy3', hardware_version='3')
        ]
        for instrument in instrument_pool:
            self.rr.create(instrument)

        view_id = self.discovery.create_view('devices', fields=['hardware_version'])

        search_string = "search 'hardware_version' is '2' from '%s'"%view_id
        results = self.poll(5, self.discovery.parse,search_string)
        result  = results[0]['_source']
        self.assertIsInstance(result, InstrumentDevice)
        self.assertTrue(result.name == 'sonobuoy2')
        self.assertTrue(result.hardware_version == '2')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_associative_searching(self):

        view_id = self.discovery.create_view('devices', fields=['model'])
        site_id,_ = self.rr.create(Site('my_site'))
        pd_id, _  = self.rr.create(PlatformDevice('my_device', model='abc123'))
        self.rr.create_association(subject=site_id, object=pd_id, predicate=PRED.hasDevice)

        search_string = "search 'model' is 'abc*' from '%s' and belongs to '%s'"%(view_id, site_id)

        results = self.poll(5, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(pd_id in results)

    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)


    @skipIf(not use_es, 'No ElasticSearch')
    def test_ranged_value_searching(self):
        discovery = self.discovery
        rr        = self.rr
        
        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_collections_searching(self):

        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id    = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" %(view_id, collection_id)

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        self.discovery.create_view('devs',fields=['name','serial_number'])

        search_string = "search 'serial_number' is 'abc*' from 'devs'"
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name_index(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"
        
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id , _ = self.rr.create(bank_acc)

        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"

        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_data_product_search(self):

        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.data_product_level = 'basic'
        dp_id, _ = self.rr.create(dp)

        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'data_product_level' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_events_search(self):
        # Create a resource to force a new event

        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id

        results = self.poll(9, self.discovery.parse,search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin

        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_distance_search(self):

        pd = PlatformDevice(name='test_dev')

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
   
    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_bbox_search(self):

        pd = PlatformDevice(name='test_dev')
        pd.nominal_location.lat = 5
        pd.nominal_location.lon = 5

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo box top-left lat 10 lon 0 bottom-right lat 0 lon 10 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
Example #9
class CatalogManagementIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(CatalogManagementIntTest,self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.cms_cli = CatalogManagementServiceClient()
        self.rr_cli  = ResourceRegistryServiceClient()

    def test_create_catalog(self):
        #-------------------------------------
        # Tests basic catalog creation
        #-------------------------------------

        index1 = Index(name='index1')
        index1.options.attribute_match = ['common']
        index_id, _ = self.rr_cli.create(index1)

        catalog_id = self.cms_cli.create_catalog('common_catalog', ['common'])
        self.assertTrue(index_id in self.cms_cli.list_indexes(catalog_id))

    def test_create_catalog_matching(self):
        #-------------------------------------------
        # Tests catalog creation and index matching
        #-------------------------------------------

        indexes = [
            Index(name='index1'),
            Index(name='index2'),
            Index(name='index3')
        ]
        indexes[0].options = SearchOptions(**{ # Devices
            'attribute_match' : ['name','model','serial'],
            'geo_fields'      : ['nominal_location']
        })
        indexes[1].options = SearchOptions(**{ # BankAccount
            'attribute_match' : ['name','description','owner'],
            'range_fields'    : ['cash_balance']
        })
        indexes[2].options = SearchOptions(**{ # Hybrid
            'attribute_match' : ['name'],
            'range_fields'    : ['arc'],
            'geo_fields'      : ['nominal_location']
        })

        index_ids = list([ self.rr_cli.create(i)[0] for i in indexes])

        devices_catalog_id = self.cms_cli.create_catalog('devices_catalog', ['model'])
        self.assertTrue(index_ids[0] in self.cms_cli.list_indexes(devices_catalog_id), 'The catalog didn\'t match the correct index.')

        accounts_catalog_id = self.cms_cli.create_catalog('accounts_catalog', ['cash_balance'])
        self.assertTrue(index_ids[1] in self.cms_cli.list_indexes(accounts_catalog_id), 'The catalog didn\'t match the correct index.')
        
        geo_catalog_id = self.cms_cli.create_catalog('geo_catalog', ['nominal_location'])
        self.assertTrue(index_ids[2] in self.cms_cli.list_indexes(geo_catalog_id), 'The catalog didn\'t match the correct index.')

        names_catalog_id = self.cms_cli.create_catalog('names_catalog', ['name'])
        names_list = self.cms_cli.list_indexes(names_catalog_id)
        for i in index_ids:
            self.assertIn(i,names_list)

    def test_catalog_field_exclusion(self):
        half_match = Index(name='half_match', options=SearchOptions(attribute_match=['one']))
        full_match = Index(name='full_match', options=SearchOptions(attribute_match=['one','two']))

        half_match_id, _ = self.rr_cli.create(half_match)
        full_match_id, _ = self.rr_cli.create(full_match)

        catalog_id = self.cms_cli.create_catalog('test_cat', keywords=['one','two'])
        index_list = self.cms_cli.list_indexes(catalog_id, id_only=True)
        self.assertTrue(index_list == [full_match_id])



    def test_update_catalog(self):
        empty_catalog_id = self.cms_cli.create_catalog('empty_catalog')

        empty_catalog = self.rr_cli.read(empty_catalog_id)
        empty_catalog.description = 'testing update'
        self.cms_cli.update_catalog(empty_catalog)

        empty_catalog = self.rr_cli.read(empty_catalog_id)
        self.assertTrue(empty_catalog.description == 'testing update')

    def test_read_catalog(self):
        catalog_res = Catalog(name='fake_catalog')
        catalog_id, _ = self.rr_cli.create(catalog_res)
        
        cat = self.cms_cli.read_catalog(catalog_id)
        self.assertTrue(cat.name == 'fake_catalog')

    def test_delete_catalog(self):
        empty_catalog_id, _ = self.rr_cli.create(Catalog(name='empty_catalog'))

        self.rr_cli.read(empty_catalog_id) # Throws exception if it doesn't exist

        self.cms_cli.delete_catalog(empty_catalog_id)

        with self.assertRaises(NotFound):
            self.rr_cli.read(empty_catalog_id)

        with self.assertRaises(NotFound):
            self.cms_cli.read_catalog(empty_catalog_id)
Example #10
class CatalogManagementIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(CatalogManagementIntTest,self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.cms_cli = CatalogManagementServiceClient()
        self.rr_cli  = ResourceRegistryServiceClient()

    def test_create_catalog(self):
        #-------------------------------------
        # Tests basic catalog creation
        #-------------------------------------

        index1 = Index(name='index1')
        index1.options.attribute_match = ['common']
        index_id, _ = self.rr_cli.create(index1)

        catalog_id = self.cms_cli.create_catalog('common_catalog', ['common'])
        self.assertIn(index_id, self.cms_cli.list_indexes(catalog_id))

    def test_create_catalog_matching(self):
        #-------------------------------------------
        # Tests catalog creation and index matching
        #-------------------------------------------

        indexes = [
            Index(name='index1'),
            Index(name='index2'),
            Index(name='index3')
        ]
        indexes[0].options = SearchOptions(**{ # Devices
            'attribute_match' : ['name','model','serial'],
            'geo_fields'      : ['nominal_location']
        })
        indexes[1].options = SearchOptions(**{ # BankAccount
            'attribute_match' : ['name','description','owner'],
            'range_fields'    : ['cash_balance']
        })
        indexes[2].options = SearchOptions(**{ # Hybrid
            'attribute_match' : ['name'],
            'range_fields'    : ['arc'],
            'geo_fields'      : ['nominal_location']
        })

        index_ids = [self.rr_cli.create(i)[0] for i in indexes]

        devices_catalog_id = self.cms_cli.create_catalog('devices_catalog', ['model'])
        self.assertIn(index_ids[0], self.cms_cli.list_indexes(devices_catalog_id), 'The catalog didn\'t match the correct index.')

        accounts_catalog_id = self.cms_cli.create_catalog('accounts_catalog', ['cash_balance'])
        self.assertIn(index_ids[1], self.cms_cli.list_indexes(accounts_catalog_id), 'The catalog didn\'t match the correct index.')

        geo_catalog_id = self.cms_cli.create_catalog('geo_catalog', ['nominal_location'])
        self.assertIn(index_ids[2], self.cms_cli.list_indexes(geo_catalog_id), 'The catalog didn\'t match the correct index.')

        names_catalog_id = self.cms_cli.create_catalog('names_catalog', ['name'])
        names_list = self.cms_cli.list_indexes(names_catalog_id)
        self.assertEqual(set(index_ids), set(names_list), 'The catalog didn\'t match the correct index.')

    def test_catalog_field_exclusion(self):
        half_match = Index(name='half_match', options=SearchOptions(attribute_match=['one']))
        full_match = Index(name='full_match', options=SearchOptions(attribute_match=['one','two']))

        half_match_id, _ = self.rr_cli.create(half_match)
        full_match_id, _ = self.rr_cli.create(full_match)

        catalog_id = self.cms_cli.create_catalog('test_cat', keywords=['one','two'])
        index_list = self.cms_cli.list_indexes(catalog_id, id_only=True)
        self.assertEqual(index_list, [full_match_id])

    def test_update_catalog(self):
        empty_catalog_id = self.cms_cli.create_catalog('empty_catalog')

        empty_catalog = self.rr_cli.read(empty_catalog_id)
        empty_catalog.description = 'testing update'
        self.cms_cli.update_catalog(empty_catalog)

        empty_catalog = self.rr_cli.read(empty_catalog_id)
        self.assertEqual(empty_catalog.description, 'testing update')

    def test_read_catalog(self):
        catalog_res = Catalog(name='fake_catalog')
        catalog_id, _ = self.rr_cli.create(catalog_res)
        
        cat = self.cms_cli.read_catalog(catalog_id)
        self.assertEqual(cat.name, 'fake_catalog')

    def test_delete_catalog(self):
        empty_catalog_id, _ = self.rr_cli.create(Catalog(name='empty_catalog'))

        self.rr_cli.read(empty_catalog_id) # Throws exception if it doesn't exist

        self.cms_cli.delete_catalog(empty_catalog_id)

        with self.assertRaises(NotFound):
            self.rr_cli.read(empty_catalog_id)

        with self.assertRaises(NotFound):
            self.cms_cli.read_catalog(empty_catalog_id)
class CatalogManagementIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(CatalogManagementIntTest, self).setUp()

        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2dm.yml")

        self.cms_cli = CatalogManagementServiceClient()
        self.rr_cli = ResourceRegistryServiceClient()

    def test_create_catalog(self):
        # -------------------------------------
        # Tests basic catalog creation
        # -------------------------------------

        index1 = Index(name="index1")
        index1.options.attribute_match = ["common"]
        index_id, _ = self.rr_cli.create(index1)

        catalog_id = self.cms_cli.create_catalog("common_catalog", ["common"])
        self.assertIn(index_id, self.cms_cli.list_indexes(catalog_id))

    def test_create_catalog_matching(self):
        # -------------------------------------------
        # Tests catalog creation and index matching
        # -------------------------------------------

        indexes = [Index(name="index1"), Index(name="index2"), Index(name="index3")]
        indexes[0].options = SearchOptions(
            **{"attribute_match": ["name", "model", "serial"], "geo_fields": ["nominal_location"]}  # Devices
        )
        indexes[1].options = SearchOptions(
            **{"attribute_match": ["name", "description", "owner"], "range_fields": ["cash_balance"]}  # BankAccount
        )
        indexes[2].options = SearchOptions(
            **{"attribute_match": ["name"], "range_fields": ["arc"], "geo_fields": ["nominal_location"]}  # Hybrid
        )

        index_ids = [self.rr_cli.create(i)[0] for i in indexes]

        devices_catalog_id = self.cms_cli.create_catalog("devices_catalog", ["model"])
        self.assertIn(
            index_ids[0], self.cms_cli.list_indexes(devices_catalog_id), "The catalog didn't match the correct index."
        )

        accounts_catalog_id = self.cms_cli.create_catalog("accounts_catalog", ["cash_balance"])
        self.assertIn(
            index_ids[1],
            self.cms_cli.list_indexes(accounts_catalog_id),
            "The catalog didn't match the correct index.",
        )

        geo_catalog_id = self.cms_cli.create_catalog("geo_catalog", ["nominal_location"])
        self.assertIn(
            index_ids[2], self.cms_cli.list_indexes(geo_catalog_id), "The catalog didn't match the correct index."
        )

        names_catalog_id = self.cms_cli.create_catalog("names_catalog", ["name"])
        names_list = self.cms_cli.list_indexes(names_catalog_id)
        self.assertEqual(set(index_ids), set(names_list), "The catalog didn't match the correct index.")

    def test_catalog_field_exclusion(self):
        half_match = Index(name="half_match", options=SearchOptions(attribute_match=["one"]))
        full_match = Index(name="full_match", options=SearchOptions(attribute_match=["one", "two"]))

        half_match_id, _ = self.rr_cli.create(half_match)
        full_match_id, _ = self.rr_cli.create(full_match)

        catalog_id = self.cms_cli.create_catalog("test_cat", keywords=["one", "two"])
        index_list = self.cms_cli.list_indexes(catalog_id, id_only=True)
        self.assertEqual(index_list, [full_match_id])

    def test_update_catalog(self):
        empty_catalog_id = self.cms_cli.create_catalog("empty_catalog")

        empty_catalog = self.rr_cli.read(empty_catalog_id)
        empty_catalog.description = "testing update"
        self.cms_cli.update_catalog(empty_catalog)

        empty_catalog = self.rr_cli.read(empty_catalog_id)
        self.assertEqual(empty_catalog.description, "testing update")

    def test_read_catalog(self):
        catalog_res = Catalog(name="fake_catalog")
        catalog_id, _ = self.rr_cli.create(catalog_res)

        cat = self.cms_cli.read_catalog(catalog_id)
        self.assertEqual(cat.name, "fake_catalog")

    def test_delete_catalog(self):
        empty_catalog_id, _ = self.rr_cli.create(Catalog(name="empty_catalog"))

        self.rr_cli.read(empty_catalog_id)  # Throws exception if it doesn't exist

        self.cms_cli.delete_catalog(empty_catalog_id)

        with self.assertRaises(NotFound):
            self.rr_cli.read(empty_catalog_id)

        with self.assertRaises(NotFound):
            self.cms_cli.read_catalog(empty_catalog_id)
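
The catalog tests above repeat the same index-fixture setup in several methods. A small helper along these lines could factor that out; this is only a sketch, assuming the same Index, SearchOptions and ResourceRegistryServiceClient interfaces the tests already use, and the name make_index is hypothetical.

def make_index(rr_cli, name, **search_options):
    """Create an Index resource with the given SearchOptions fields and return its id.

    Hypothetical helper; rr_cli is assumed to be a ResourceRegistryServiceClient,
    and Index/SearchOptions the same interface objects imported by the test module.
    """
    index = Index(name=name, options=SearchOptions(**search_options))
    index_id, _ = rr_cli.create(index)
    return index_id

# Possible use inside test_catalog_field_exclusion (sketch only):
#     full_match_id = make_index(self.rr_cli, "full_match", attribute_match=["one", "two"])
#     catalog_id = self.cms_cli.create_catalog("test_cat", keywords=["one", "two"])
#     self.assertEqual(self.cms_cli.list_indexes(catalog_id, id_only=True), [full_match_id])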