def query_time(
    self, source_id="", field="", from_value=None, to_value=None, order=None, limit=0, offset=0, id_only=False
):
    if not self.use_es:
        raise BadRequest("Can not make queries without ElasticSearch, enable in res/config/pyon.yml")

    if from_value is not None:
        validate_is_instance(from_value, basestring, '"From" is not a valid string (%s)' % from_value)

    if to_value is not None:
        validate_is_instance(to_value, basestring, '"To" is not a valid string')

    es = ep.ElasticSearch(host=self.elasticsearch_host, port=self.elasticsearch_port)
    source = self.clients.resource_registry.read(source_id)

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # If source is a view, catalog or collection go through it and recursively call
    # query_time on all the results in the indexes
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    iterate = self._multi(
        self.query_time,
        source,
        field=field,
        from_value=from_value,
        to_value=to_value,
        order=order,
        limit=limit,
        offset=offset,
        id_only=id_only,
    )
    if iterate is not None:
        return iterate

    index = source
    validate_is_instance(index, ElasticSearchIndex, "%s does not refer to a valid index." % source_id)

    if order:
        validate_is_instance(order, dict, "Order is incorrect.")
        es.sort(**order)

    if limit:
        es.size(limit)

    if field == "*":
        field = "_all"

    if from_value is not None:
        from_value = calendar.timegm(dateutil.parser.parse(from_value).timetuple()) * 1000

    if to_value is not None:
        to_value = calendar.timegm(dateutil.parser.parse(to_value).timetuple()) * 1000

    query = ep.ElasticQuery.range(field=field, from_value=from_value, to_value=to_value)

    response = IndexManagementService._es_call(es.search_index_advanced, index.index_name, query)
    IndexManagementService._check_response(response)

    return self._results_from_response(response, id_only)
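# A standalone sketch of the timestamp conversion query_time applies to its string bounds
# before building the range query. The date string below is hypothetical; python-dateutil
# is assumed to be available, as it already is for the service code above.
import calendar

import dateutil.parser

millis = calendar.timegm(dateutil.parser.parse('2012-01-01T00:00:00Z').timetuple()) * 1000
assert millis == 1325376000000  # epoch milliseconds, evaluated in UTC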
def query_term(
    self, source_id="", field="", value="", fuzzy=False, match=False, order=None, limit=0, offset=0, id_only=False
):
    """
    Elasticsearch Query against an index
    > discovery.query_index('indexID', 'name', '*', order={'name':'asc'}, limit=20, id_only=False)
    """
    if not self.use_es:
        raise BadRequest("Can not make queries without ElasticSearch, enable system.elasticsearch to make queries.")

    validate_true(source_id, "Unspecified source_id")
    validate_true(field, "Unspecified field")
    validate_true(value, "Unspecified value")

    es = ep.ElasticSearch(host=self.elasticsearch_host, port=self.elasticsearch_port)
    source = self.clients.resource_registry.read(source_id)

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # If source is a view, catalog or collection go through it and recursively call
    # query_term on all the results in the indexes
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    iterate = self._multi(
        self.query_term, source, field=field, value=value, order=order, limit=limit, offset=offset, id_only=id_only
    )
    if iterate is not None:
        return iterate

    index = source
    validate_is_instance(index, ElasticSearchIndex, "%s does not refer to a valid index." % index)

    if order:
        validate_is_instance(order, dict, "Order is incorrect.")
        es.sort(**order)

    if limit:
        es.size(limit)

    if offset:
        es.from_offset(offset)

    if field == "*":
        field = "_all"

    if fuzzy:
        query = ep.ElasticQuery.fuzzy_like_this(value, fields=[field])
    elif match:
        match_query = ep.ElasticQuery.match(field=field, query=value)
        query = {"match_phrase_prefix": match_query["match"]}
    elif "*" in value:
        query = ep.ElasticQuery.wildcard(field=field, value=value)
    else:
        query = ep.ElasticQuery.field(field=field, query=value)

    response = IndexManagementService._es_call(es.search_index_advanced, index.index_name, query)
    IndexManagementService._check_response(response)

    return self._results_from_response(response, id_only)
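# A minimal, standalone sketch of the branch selection in query_term above: which kind of
# ElasticSearch query gets built for which combination of flags. It mirrors only the
# branching logic, not elasticpy's actual query objects; the sample values are hypothetical.
def _query_kind(value, fuzzy=False, match=False):
    if fuzzy:
        return 'fuzzy_like_this'
    if match:
        return 'match_phrase_prefix'
    if '*' in value:
        return 'wildcard'
    return 'field'

assert _query_kind('CTD*') == 'wildcard'
assert _query_kind('CTD', match=True) == 'match_phrase_prefix'
assert _query_kind('CTD', fuzzy=True) == 'fuzzy_like_this'
assert _query_kind('CTD') == 'field'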
def query_geo_bbox(
    self, source_id="", field="", top_left=None, bottom_right=None, order=None, limit=0, offset=0, id_only=False
):
    validate_true(isinstance(top_left, (list, tuple)), "Top Left is not a list or a tuple")
    validate_true(len(top_left) == 2, "Top Left is not of the right size: (2)")
    validate_true(isinstance(bottom_right, (list, tuple)), "Bottom Right is not a list or a tuple")
    validate_true(len(bottom_right) == 2, "Bottom Right is not of the right size: (2)")

    if not self.use_es:
        raise BadRequest("Can not make queries without ElasticSearch, enable in res/config/pyon.yml")

    es = ep.ElasticSearch(host=self.elasticsearch_host, port=self.elasticsearch_port)
    source = self.clients.resource_registry.read(source_id)

    iterate = self._multi(
        self.query_geo_bbox,
        source=source,
        field=field,
        top_left=top_left,
        bottom_right=bottom_right,
        order=order,
        limit=limit,
        offset=offset,
        id_only=id_only,
    )
    if iterate is not None:
        return iterate

    index = source
    validate_is_instance(index, ElasticSearchIndex, "%s does not refer to a valid index." % index)

    sorts = ep.ElasticSort()
    if order is not None and isinstance(order, dict):
        sort_field = order.keys()[0]
        value = order[sort_field]
        sorts.sort(sort_field, value)
        es.sorted(sorts)

    if limit:
        es.size(limit)

    if offset:
        es.from_offset(offset)

    if field == "*":
        field = "_all"

    filter = ep.ElasticFilter.geo_bounding_box(field, top_left, bottom_right)
    es.filtered(filter)

    query = ep.ElasticQuery.match_all()

    response = IndexManagementService._es_call(es.search_index_advanced, index.index_name, query)
    IndexManagementService._check_response(response)

    return self._results_from_response(response, id_only)
def es_cleanup():
    es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
    es_port = CFG.get_safe('server.elasticsearch.port', '9200')
    es = ep.ElasticSearch(host=es_host, port=es_port, timeout=10)
    indexes = STD_INDEXES.keys()
    indexes.append('%s_resources_index' % get_sys_name().lower())
    indexes.append('%s_events_index' % get_sys_name().lower())
    for index in indexes:
        IndexManagementService._es_call(es.river_couchdb_delete, index)
        IndexManagementService._es_call(es.index_delete, index)
def query_range(
    self, source_id="", field="", from_value=None, to_value=None, order=None, limit=0, offset=0, id_only=False
):
    if not self.use_es:
        raise BadRequest("Can not make queries without ElasticSearch, enable in res/config/pyon.yml")

    validate_true(from_value is not None, "from_value not specified")
    validate_true(isinstance(from_value, int) or isinstance(from_value, float), "from_value is not a valid number")
    validate_true(to_value is not None, "to_value not specified")
    validate_true(isinstance(to_value, int) or isinstance(to_value, float), "to_value is not a valid number")
    validate_true(source_id, "source_id not specified")

    es = ep.ElasticSearch(host=self.elasticsearch_host, port=self.elasticsearch_port)
    source = self.clients.resource_registry.read(source_id)

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # If source is a view, catalog or collection go through it and recursively call
    # query_range on all the results in the indexes
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    iterate = self._multi(
        self.query_range,
        source,
        field=field,
        from_value=from_value,
        to_value=to_value,
        order=order,
        limit=limit,
        offset=offset,
        id_only=id_only,
    )
    if iterate is not None:
        return iterate

    index = source
    validate_is_instance(index, ElasticSearchIndex, "%s does not refer to a valid index." % source_id)

    if order:
        validate_is_instance(order, dict, "Order is incorrect.")
        es.sort(**order)

    if limit:
        es.size(limit)

    if field == "*":
        field = "_all"

    query = ep.ElasticQuery().range(field=field, from_value=from_value, to_value=to_value)

    response = IndexManagementService._es_call(es.search_index_advanced, index.index_name, query)
    IndexManagementService._check_response(response)

    return self._results_from_response(response, id_only)
def wipe(self):
    for index in STD_INDEXES.iterkeys():
        IndexManagementService._es_call(self.es.index_delete, index)
        IndexManagementService._es_call(self.es.river_couchdb_delete, index)
    for index in EDGE_INDEXES.iterkeys():
        IndexManagementService._es_call(self.es.index_delete, index)
        IndexManagementService._es_call(self.es.river_couchdb_delete, index)
def query_geo_distance(
    self, source_id="", field="", origin=None, distance="", units="mi", order=None, limit=0, offset=0, id_only=False
):
    validate_true(isinstance(origin, (tuple, list)), "Origin is not a list or tuple.")
    validate_true(len(origin) == 2, "Origin is not of the right size: (2)")

    if not self.use_es:
        raise BadRequest("Can not make queries without ElasticSearch, enable in res/config/pyon.yml")

    es = ep.ElasticSearch(host=self.elasticsearch_host, port=self.elasticsearch_port)
    source = self.clients.resource_registry.read(source_id)

    iterate = self._multi(self.query_geo_distance, source=source, field=field, origin=origin, distance=distance)
    if iterate is not None:
        return iterate

    index = source
    validate_is_instance(index, ElasticSearchIndex, "%s does not refer to a valid index." % index)

    sorts = ep.ElasticSort()
    if order is not None and isinstance(order, dict):
        sort_field = order.keys()[0]
        value = order[sort_field]
        sorts.sort(sort_field, value)
        es.sorted(sorts)

    if limit:
        es.size(limit)

    if offset:
        es.from_offset(offset)

    if field == "*":
        field = "_all"

    sorts.geo_distance(field, origin, units)
    es.sorted(sorts)

    filter = ep.ElasticFilter.geo_distance(field, origin, "%s%s" % (distance, units))
    es.filtered(filter)

    query = ep.ElasticQuery.match_all()

    response = IndexManagementService._es_call(es.search_index_advanced, index.index_name, query)
    IndexManagementService._check_response(response)

    return self._results_from_response(response, id_only)
def query_term(self, source_id='', field='', value='', order=None, limit=0, offset=0, id_only=False):
    '''
    Elasticsearch Query against an index
    > discovery.query_index('indexID', 'name', '*', order={'name':'asc'}, limit=20, id_only=False)
    '''
    if not self.use_es:
        raise BadRequest('Can not make queries without ElasticSearch, enable system.elasticsearch to make queries.')

    validate_true(source_id, 'Unspecified source_id')
    validate_true(field, 'Unspecified field')
    validate_true(value, 'Unspecified value')

    es = ep.ElasticSearch(host=self.elasticsearch_host, port=self.elasticsearch_port)
    source = self.clients.resource_registry.read(source_id)

    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # If source is a view, catalog or collection go through it and recursively call
    # query_term on all the results in the indexes
    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    iterate = self._multi(self.query_term, source, field=field, value=value, order=order, limit=limit,
                          offset=offset, id_only=id_only)
    if iterate is not None:
        return iterate

    index = source
    validate_is_instance(index, ElasticSearchIndex, '%s does not refer to a valid index.' % index)

    if order:
        validate_is_instance(order, dict, 'Order is incorrect.')
        es.sort(**order)

    if limit:
        es.size(limit)

    if offset:
        es.from_offset(offset)

    if field == '*':
        field = '_all'

    query = ep.ElasticQuery().wildcard(field=field, value=value)

    response = IndexManagementService._es_call(es.search_index_advanced, index.index_name, query)
    IndexManagementService._check_response(response)

    return self._results_from_response(response, id_only)
def clean_bootstrap(self):
    for k, v in STD_INDEXES.iteritems():
        IndexManagementService._es_call(self.es.river_couchdb_delete, k)
        IndexManagementService._es_call(self.es.index_delete, k)
    for k, v in EDGE_INDEXES.iteritems():
        IndexManagementService._es_call(self.es.river_couchdb_delete, k)
        IndexManagementService._es_call(self.es.index_delete, k)

    self.index_bootstrap()
def test_bootstrap(self):
    cc = self.container

    #=======================================
    # Clean indexes
    #=======================================

    config = CFG
    config.op = 'index_bootstrap'
    # Thankfully, the default system.force_clean for integration tests is False :)
    cc.spawn_process(
        name='index_bootstrap',
        module='ion.processes.bootstrap.index_bootstrap',
        cls='IndexBootStrap',
        config=config
    )

    index_list = IndexManagementService._es_call(self.es.index_list)

    for index in STD_INDEXES.iterkeys():
        self.assertTrue(index in index_list)
def test_clean_bootstrap(self):
    cc = self.container

    IndexManagementService._es_call(self.es.index_delete, '%s_sites_index' % get_sys_name())
    response = IndexManagementService._es_call(
        self.es.index_create, '%s_sites_index' % get_sys_name())  # Force a conflict
    IndexManagementService._check_response(response)

    config = CFG
    config.op = 'clean_bootstrap'
    # Thankfully, the default system.force_clean for integration tests is False :)
    cc.spawn_process(
        name='index_bootstrap',
        module='ion.processes.bootstrap.index_bootstrap',
        cls='IndexBootStrap',
        config=config
    )

    index_list = IndexManagementService._es_call(self.es.index_list)

    for index in STD_INDEXES.iterkeys():
        self.assertTrue(index in index_list)
def query_vertical_bounds(self, source_id='', field='', from_value=None, to_value=None, order=None, limit=0,
                          offset=0, id_only=False):
    if from_value is not None:
        validate_is_instance(from_value, float, '"From" is not a valid float (%s)' % from_value)

    if to_value is not None:
        validate_is_instance(to_value, float, '"To" is not a valid float')

    es = ep.ElasticSearch(host=self.elasticsearch_host, port=self.elasticsearch_port)
    source = self.clients.resource_registry.read(source_id)

    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # If source is a view, catalog or collection go through it and recursively call
    # query_time on all the results in the indexes
    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    iterate = self._multi(self.query_time, source, field=field, from_value=from_value, to_value=to_value,
                          order=order, limit=limit, offset=offset, id_only=id_only)
    if iterate is not None:
        return iterate

    index = source
    validate_is_instance(index, ElasticSearchIndex, '%s does not refer to a valid index.' % source_id)

    if order:
        validate_is_instance(order, dict, 'Order is incorrect.')
        es.sort(**order)

    if field == '*':
        field = '_all'
        vertical_min = 'geospatial_vertical_min'
        vertical_max = 'geospatial_vertical_max'
    else:
        vertical_min = '%s.geospatial_vertical_min' % field
        vertical_max = '%s.geospatial_vertical_max' % field

    query = {
        "query": {
            "match_all": {}
        },
        "filter": {
            "and": [
                {
                    "or": [
                        {"range": {vertical_min: {"gte": from_value}}},
                        {"range": {vertical_max: {"gte": from_value}}}
                    ]
                },
                {
                    "or": [
                        {"range": {vertical_min: {"lte": to_value}}},
                        {"range": {vertical_max: {"lte": to_value}}}
                    ]
                }
            ]
        }
    }

    if limit:
        query['size'] = limit
    if offset:
        query['from'] = offset

    response = IndexManagementService._es_call(es.raw_query, '%s/_search' % index.index_name, method='POST',
                                               data=query, host=self.elasticsearch_host,
                                               port=self.elasticsearch_port)
    IndexManagementService._check_response(response)

    retval = self._results_from_response(response, id_only)
    return retval
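# A standalone sketch of the raw search body query_vertical_bounds posts to ElasticSearch:
# a match_all query plus an and/or filter keeping documents whose vertical extent overlaps
# [from_value, to_value]. The bounds below are hypothetical; the field names are the ones
# used for the '*' case above.
import json

from_value, to_value = 0.0, 100.0
vertical_min, vertical_max = 'geospatial_vertical_min', 'geospatial_vertical_max'
sketch_query = {
    'query': {'match_all': {}},
    'filter': {
        'and': [
            {'or': [{'range': {vertical_min: {'gte': from_value}}},
                    {'range': {vertical_max: {'gte': from_value}}}]},
            {'or': [{'range': {vertical_min: {'lte': to_value}}},
                    {'range': {vertical_max: {'lte': to_value}}}]},
        ]
    },
}
print(json.dumps(sketch_query, indent=2))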
class IndexManagementUnitTest(PyonTestCase):
    def setUp(self):
        mock_clients = self._create_service_mock('index_management')
        self.index_management = IndexManagementService()
        self.index_management.clients = mock_clients

        self.rr_create = mock_clients.resource_registry.create
        self.rr_read = mock_clients.resource_registry.read
        self.rr_update = mock_clients.resource_registry.update
        self.rr_delete = mock_clients.resource_registry.delete
        self.rr_find_resources = mock_clients.resource_registry.find_resources
        self.rr_find_assocs = mock_clients.resource_registry.find_associations
        self.rr_find_subj = mock_clients.resource_registry.find_subjects
        self.rr_find_obj = mock_clients.resource_registry.find_objects
        self.rr_delete_assoc = mock_clients.resource_registry.delete_association

        self.get_datastore = Mock()
        self.db_create = Mock()

        self.get_datastore.return_value = DotDict({'datastore_name': 'test_datastore'})
        self.index_management.container = DotDict({
            'datastore_manager': DotDict({
                'get_datastore': self.get_datastore
            })
        })
        self.index_management.elasticsearch_host = 'notarealhost'
        self.index_management.elasticsearch_port = 9000

        self.index_name = 'test_index'

    def test_create_index(self):
        '''
        test_create_index
        Unit test for basic creation of an index
        '''
        # Mocks
        self.rr_create.return_value = ('index_id', 'rev')
        self.rr_find_resources.return_value = ([], [])

        retval = self.index_management.create_index(name='mock',
                                                    content_type=IndexManagementService.ELASTICSEARCH_INDEX,
                                                    options='ugh')
        self.assertTrue(retval == 'index_id', 'invalid return value: %s' % retval)
        self.assertTrue(self.rr_create.called)

        retval = self.index_management.create_index(name='argh',
                                                    content_type=IndexManagementService.COUCHDB_INDEX)
        self.assertTrue(retval == 'index_id', 'invalid return value: %s' % retval)

        with self.assertRaises(BadRequest):
            self.index_management.create_index(name='another', content_type='not_listed')

    def test_dup_index(self):
        # Mocks
        self.rr_find_resources.return_value = ([1], [1])

        # Execution
        with self.assertRaises(BadRequest):
            self.index_management.create_index('mock_index_id')

    def test_read_index(self):
        # mocks
        return_obj = dict(mock='mock')
        self.rr_read.return_value = return_obj

        # execution
        retval = self.index_management.read_index('mock_index_id')

        # assertions
        self.assertEquals(return_obj, retval, 'The resource should be returned.')

    def test_update_index(self):
        with self.assertRaises(BadRequest):
            self.index_management.update_index()
        with self.assertRaises(BadRequest):
            self.index_management.update_index('hi')
        self.index_management.update_index(Index())

    def test_delete_index(self):
        self.index_management.delete_index('index_id')
        self.rr_delete.assert_called_with('index_id')

    def test_list_indexes(self):
        # Mocks
        self.rr_find_resources.return_value = ([
            DotDict({'_id': '1', 'name': '1'}),
            DotDict({'_id': '2', 'name': '2'}),
            DotDict({'_id': '3', 'name': '3'}),
            DotDict({'_id': '4', 'name': '4'})
        ], [1, 2, 3, 4])

        # Execution
        retval = self.index_management.list_indexes()

        # Assertions
        self.assertTrue(retval == {'1': '1', '2': '2', '3': '3', '4': '4'}, 'Index mismatch')

    def test_find_indexes(self):
        self.index_management.list_indexes = Mock()
        self.index_management.list_indexes.return_value = {'index_name': '1'}
        retval = self.index_management.find_indexes('index_name')
        self.assertTrue(retval == '1')

        self.index_management.list_indexes.return_value = {}
        retval = self.index_management.find_indexes('index_name')
        self.assertTrue(retval == None)

    def test_create_collection(self):
        self.rr_create.return_value = 'collection_id', 'rev'

        self.rr_find_resources.return_value = ([0], [0])
        with self.assertRaises(BadRequest):
            self.index_management.create_collection('test', [0])

        self.rr_find_resources.return_value = ([], [])
        with self.assertRaises(BadRequest):
            self.index_management.create_collection('test', [])

        retval = self.index_management.create_collection('test', [0])
        self.assertTrue(retval == 'collection_id')

    def test_read_collection(self):
        self.rr_read.return_value = 'retval'
        retval = self.index_management.read_collection('test')
        self.assertTrue(retval == 'retval')

    def test_update_collection(self):
        with self.assertRaises(BadRequest):
            ind = Index()
            self.index_management.update_collection(ind)
        self.index_management.update_collection(Collection())
        self.assertTrue(self.rr_update.called)

    def test_delete_collection(self):
        self.rr_find_assocs.return_value = ['assoc']

        retval = self.index_management.delete_collection('collection_id')
        self.assertTrue(retval)
        self.rr_delete.assert_called_once_with('collection_id')
        self.rr_delete_assoc.assert_called_once_with('assoc')

    def test_list_collection_resources(self):
        self.rr_find_obj.return_value = (['test_id'], [''])
        result1 = self.index_management.list_collection_resources('collection_id', id_only=True)
        self.assertTrue(result1 == ['test_id'])

    def test_find_collection(self):
        self.rr_find_resources.return_value = (['test'], [])
        retval = self.index_management.find_collection(collection_name='hi')
        self.assertTrue(retval == ['test'], '%s' % retval)

        fake_collection = Collection(resources=['test_res_id'])
        fake_assoc = Association(s='test_id')
        self.rr_find_assocs.return_value = [fake_assoc]
        retval = self.index_management.find_collection(resource_ids=['test_res_id'])
        self.assertTrue(retval == ['test_id'], '%s' % retval)

        with self.assertRaises(BadRequest):
            self.index_management.find_collection()
def query_time_bounds(self, source_id='', field='', from_value=None, to_value=None, order=None, limit=0, offset=0,
                      id_only=False):
    if from_value is not None:
        validate_is_instance(from_value, basestring, '"From" is not a valid string (%s)' % from_value)

    if to_value is not None:
        validate_is_instance(to_value, basestring, '"To" is not a valid string')

    es = ep.ElasticSearch(host=self.elasticsearch_host, port=self.elasticsearch_port)
    source = self.clients.resource_registry.read(source_id)

    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # If source is a view, catalog or collection go through it and recursively call
    # query_time on all the results in the indexes
    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    iterate = self._multi(self.query_time, source, field=field, from_value=from_value, to_value=to_value,
                          order=order, limit=limit, offset=offset, id_only=id_only)
    if iterate is not None:
        return iterate

    index = source
    validate_is_instance(index, ElasticSearchIndex, '%s does not refer to a valid index.' % source_id)

    if order:
        validate_is_instance(order, dict, 'Order is incorrect.')
        es.sort(**order)

    if field == '*':
        field = '_all'
        start_time = 'start_datetime'
        end_time = 'end_datetime'
    else:
        start_time = '%s.start_datetime' % field
        end_time = '%s.end_datetime' % field

    if from_value is not None:
        from_value = calendar.timegm(dateutil.parser.parse(from_value).timetuple()) * 1000

    if to_value is not None:
        to_value = calendar.timegm(dateutil.parser.parse(to_value).timetuple()) * 1000

    query = {
        "query": {
            "match_all": {}
        },
        "filter": {
            "and": [
                {
                    "or": [
                        {"range": {start_time: {"gte": from_value}}},
                        {"range": {end_time: {"gte": from_value}}}
                    ]
                },
                {
                    "or": [
                        {"range": {start_time: {"lte": to_value}}},
                        {"range": {end_time: {"lte": to_value}}}
                    ]
                }
            ]
        }
    }

    if limit:
        query['size'] = limit
    if offset:
        query['from'] = offset

    response = IndexManagementService._es_call(es.raw_query, '%s/_search' % index.index_name, method='POST',
                                               data=query, host=self.elasticsearch_host,
                                               port=self.elasticsearch_port)
    IndexManagementService._check_response(response)

    return self._results_from_response(response, id_only)
class IndexManagementUnitTest(PyonTestCase):
    def setUp(self):
        mock_clients = self._create_service_mock('index_management')
        self.index_management = IndexManagementService()
        self.index_management.clients = mock_clients

        self.rr_create = mock_clients.resource_registry.create
        self.rr_read = mock_clients.resource_registry.read
        self.rr_update = mock_clients.resource_registry.update
        self.rr_delete = mock_clients.resource_registry.delete
        self.rr_find_resources = mock_clients.resource_registry.find_resources
        self.rr_find_assocs = mock_clients.resource_registry.find_associations
        self.rr_find_subj = mock_clients.resource_registry.find_subjects
        self.rr_find_obj = mock_clients.resource_registry.find_objects
        self.rr_delete_assoc = mock_clients.resource_registry.delete_association

        self.get_datastore = Mock()
        self.db_create = Mock()

        self.get_datastore.return_value = DotDict({'datastore_name': 'test_datastore'})
        self.index_management.container = DotDict({
            'datastore_manager': DotDict({'get_datastore': self.get_datastore})
        })

        self.index_name = 'test_index'

    def test_create_index(self):
        '''
        test_create_index
        Unit test for basic creation of an index
        '''
        # Mocks
        self.rr_create.return_value = ('index_id', 'rev')
        self.rr_find_resources.return_value = ([], [])

        retval = self.index_management.create_index(name='mock',
                                                    content_type=IndexManagementService.DATASTORE_INDEX,
                                                    options='ugh')
        self.assertTrue(retval == 'index_id', 'invalid return value: %s' % retval)
        self.assertTrue(self.rr_create.called)

        with self.assertRaises(BadRequest):
            self.index_management.create_index(name='another', content_type='not_listed')

    def test_dup_index(self):
        # Mocks
        self.rr_find_resources.return_value = ([1], [1])

        # Execution
        with self.assertRaises(BadRequest):
            self.index_management.create_index('mock_index_id')

    def test_read_index(self):
        # mocks
        return_obj = dict(mock='mock')
        self.rr_read.return_value = return_obj

        # execution
        retval = self.index_management.read_index('mock_index_id')

        # assertions
        self.assertEquals(return_obj, retval, 'The resource should be returned.')

    def test_update_index(self):
        with self.assertRaises(BadRequest):
            self.index_management.update_index()
        with self.assertRaises(BadRequest):
            self.index_management.update_index('hi')
        self.index_management.update_index(Index())

    def test_delete_index(self):
        self.index_management.delete_index('index_id')
        self.rr_delete.assert_called_with('index_id')

    def test_list_indexes(self):
        # Mocks
        self.rr_find_resources.return_value = ([
            DotDict({'_id': '1', 'name': '1'}),
            DotDict({'_id': '2', 'name': '2'}),
            DotDict({'_id': '3', 'name': '3'}),
            DotDict({'_id': '4', 'name': '4'})
        ], [1, 2, 3, 4])

        # Execution
        retval = self.index_management.list_indexes()

        # Assertions
        self.assertTrue(retval == {'1': '1', '2': '2', '3': '3', '4': '4'}, 'Index mismatch')

    def test_find_indexes(self):
        self.index_management.list_indexes = Mock()
        self.index_management.list_indexes.return_value = {'index_name': '1'}
        retval = self.index_management.find_indexes('index_name')
        self.assertTrue(retval == '1')

        self.index_management.list_indexes.return_value = {}
        retval = self.index_management.find_indexes('index_name')
        self.assertTrue(retval == None)

    def test_create_collection(self):
        self.rr_create.return_value = 'collection_id', 'rev'

        self.rr_find_resources.return_value = ([0], [0])
        with self.assertRaises(BadRequest):
            self.index_management.create_collection('test', [0])

        self.rr_find_resources.return_value = ([], [])
        with self.assertRaises(BadRequest):
            self.index_management.create_collection('test', [])

        retval = self.index_management.create_collection('test', [0])
        self.assertTrue(retval == 'collection_id')

    def test_read_collection(self):
        self.rr_read.return_value = 'retval'
        retval = self.index_management.read_collection('test')
        self.assertTrue(retval == 'retval')

    def test_update_collection(self):
        with self.assertRaises(BadRequest):
            ind = Index()
            self.index_management.update_collection(ind)
        self.index_management.update_collection(Collection())
        self.assertTrue(self.rr_update.called)

    def test_delete_collection(self):
        self.rr_find_assocs.return_value = ['assoc']

        retval = self.index_management.delete_collection('collection_id')
        self.assertTrue(retval)
        self.rr_delete.assert_called_once_with('collection_id')
        self.rr_delete_assoc.assert_called_once_with('assoc')

    def test_list_collection_resources(self):
        self.rr_find_obj.return_value = (['test_id'], [''])
        result1 = self.index_management.list_collection_resources('collection_id', id_only=True)
        self.assertTrue(result1 == ['test_id'])

    def test_find_collection(self):
        self.rr_find_resources.return_value = (['test'], [])
        retval = self.index_management.find_collection(collection_name='hi')
        self.assertTrue(retval == ['test'], '%s' % retval)

        fake_collection = Collection(resources=['test_res_id'])
        fake_assoc = Association(s='test_id')
        self.rr_find_assocs.return_value = [fake_assoc]
        retval = self.index_management.find_collection(resource_ids=['test_res_id'])
        self.assertTrue(retval == ['test_id'], '%s' % retval)

        with self.assertRaises(BadRequest):
            self.index_management.find_collection()
def index_bootstrap(self):
    '''
    Creates the initial set of desired indexes based on a standard definition
    '''

    #=======================================================================================
    # Create the _river index based on the cluster configurations
    #=======================================================================================
    IndexManagementService._es_call(
        self.es.index_create,
        '_river',
        number_of_shards=self.river_shards,
        number_of_replicas=self.river_replicas
    )

    filters = {
        '_id': '_design/filters',
        'filters': {}
    }
    #=======================================================================================
    # For each of the resource types in the list of values for each standard index,
    # create a mapping and a context type in ElasticSearch based on the searchable fields.
    #=======================================================================================
    for k, v in STD_INDEXES.iteritems():
        response = IndexManagementService._es_call(
            self.es.index_create,
            k,
            number_of_shards=self.index_shards,
            number_of_replicas=self.index_replicas
        )
        IndexManagementService._check_response(response)

        body = 'function(doc, req) { switch(doc.type_) { default: return false; }}'
        for res in v:
            body = re.sub(r'default:', 'case "%s": return true; default:' % res, body)

            mappings = self.es_mapping(res)
            response = IndexManagementService._es_call(self.es.raw, '%s/%s/_mapping' % (k, res), 'POST', mappings)
            IndexManagementService._check_response(response)

        filters['filters'][k] = body

    #=======================================================================================
    # Get an instance of the datastore instance used to create the CouchDB filters
    # in support of the ElasticSearch river's filter
    #  - Allows us to filter based on resource type
    #=======================================================================================
    cc = self.container
    db = cc.datastore_manager.get_datastore('resources')
    datastore_name = db.datastore_name
    db = db.server[datastore_name]
    db.create(filters)

    #--------------------------------------------------------------------------------
    # Create the river connection between CouchDB and ElasticSearch
    #--------------------------------------------------------------------------------
    for k, v in STD_INDEXES.iteritems():
        response = IndexManagementService._es_call(
            self.es.river_couchdb_create,
            index_name=k,
            couchdb_db=datastore_name,
            couchdb_host=CFG.server.couchdb.host,
            couchdb_port=CFG.server.couchdb.port,
            couchdb_user=CFG.server.couchdb.username,
            couchdb_password=CFG.server.couchdb.password,
            couchdb_filter='filters/%s' % k,
            script=ELASTICSEARCH_CONTEXT_SCRIPT
        )
        IndexManagementService._check_response(response)

    #=======================================================================================
    # Create and map the edge indexes
    #=======================================================================================

    #--------------------------------------------------------------------------------
    # Resources Index
    #--------------------------------------------------------------------------------
    response = IndexManagementService._es_call(
        self.es.index_create,
        '%s_resources_index' % self.sysname,
        number_of_shards=self.index_shards,
        number_of_replicas=self.index_replicas
    )
    IndexManagementService._check_response(response)

    for t in RT.values():
        mappings = self.es_mapping(t)
        response = IndexManagementService._es_call(
            self.es.raw, '%s_resources_index/%s/_mapping' % (self.sysname, t), 'POST', mappings)
        IndexManagementService._check_response(response)

    response = IndexManagementService._es_call(
        self.es.river_couchdb_create,
        index_name='%s_resources_index' % self.sysname,
        couchdb_db=datastore_name,
        couchdb_host=CFG.server.couchdb.host,
        couchdb_port=CFG.server.couchdb.port,
        couchdb_user=CFG.server.couchdb.username,
        couchdb_password=CFG.server.couchdb.password,
        script=ELASTICSEARCH_CONTEXT_SCRIPT
    )
    IndexManagementService._check_response(response)

    #--------------------------------------------------------------------------------
    # Events Index
    #--------------------------------------------------------------------------------
    response = IndexManagementService._es_call(
        self.es.index_create,
        '%s_events_index' % self.sysname,
        number_of_shards=self.index_shards,
        number_of_replicas=self.index_replicas
    )
    IndexManagementService._check_response(response)

    for event in get_events():
        mappings = self.es_mapping(event)
        response = IndexManagementService._es_call(
            self.es.raw, '%s_events_index/%s/_mapping' % (self.sysname, event), 'POST', mappings)
        IndexManagementService._check_response(response)

    response = IndexManagementService._es_call(
        self.es.river_couchdb_create,
        index_name='%s_events_index' % self.sysname,
        couchdb_db='%s_events' % self.sysname,
        couchdb_host=CFG.server.couchdb.host,
        couchdb_port=CFG.server.couchdb.port,
        couchdb_user=CFG.server.couchdb.username,
        couchdb_password=CFG.server.couchdb.password,
        script=ELASTICSEARCH_CONTEXT_SCRIPT
    )
    IndexManagementService._check_response(response)

    #=======================================================================================
    # Construct the resources
    #=======================================================================================
    ims_cli = IndexManagementServiceClient()

    #--------------------------------------------------------------------------------
    # Standard Indexes
    #--------------------------------------------------------------------------------
    for index, resources in STD_INDEXES.iteritems():
        ims_cli.create_index(
            name=index,
            description='%s ElasticSearch Index Resource' % index,
            content_type=IndexManagementService.ELASTICSEARCH_INDEX,
            options=self.attr_mapping(resources)
        )

    #--------------------------------------------------------------------------------
    # CouchDB Indexes
    #--------------------------------------------------------------------------------
    for index, datastore in COUCHDB_INDEXES.iteritems():
        ims_cli.create_index(
            name=index,
            description='%s CouchDB Index Resource' % index,
            content_type=IndexManagementService.COUCHDB_INDEX,
            datastore_name=datastore
        )

    #--------------------------------------------------------------------------------
    # Edge Indexes
    #--------------------------------------------------------------------------------
    ims_cli.create_index(
        name='%s_resources_index' % self.sysname,
        description='Resources Index',
        content_type=IndexManagementService.ELASTICSEARCH_INDEX,
        options=self.attr_mapping(RT.keys())
    )

    ims_cli.create_index(
        name='%s_events_index' % self.sysname,
        description='Events Index',
        content_type=IndexManagementService.ELASTICSEARCH_INDEX,
        options=self.attr_mapping(get_events())
    )
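# A standalone sketch of how index_bootstrap grows the CouchDB filter function one resource
# type at a time with re.sub. The resource type names below are hypothetical; STD_INDEXES
# supplies the real ones.
import re

body = 'function(doc, req) { switch(doc.type_) { default: return false; }}'
for res in ['Dataset', 'DataProduct']:
    body = re.sub(r'default:', 'case "%s": return true; default:' % res, body)

# body is now:
# function(doc, req) { switch(doc.type_) {
#     case "Dataset": return true; case "DataProduct": return true; default: return false; }}
assert 'case "Dataset": return true;' in body
assert body.endswith('default: return false; }}')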