def test_exception_safe_to_retry(self):
    x = ElasticsearchBackend(app=self.app)
    assert not x.exception_safe_to_retry(Exception("failed"))
    assert not x.exception_safe_to_retry(BaseException("failed"))
    assert x.exception_safe_to_retry(exceptions.ConflictError(409, "concurrent update", {}))
    assert x.exception_safe_to_retry(exceptions.ConnectionError(503, "service unavailable", {}))
    assert x.exception_safe_to_retry(exceptions.TransportError(429, "too many requests", {}))
    assert not x.exception_safe_to_retry(exceptions.NotFoundError(404, "not found", {}))
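
# Hypothetical sketch, not the backend's actual implementation: a retry
# predicate consistent with the assertions above. It assumes elasticsearch-py
# style exceptions, where ConnectionError/ConflictError/NotFoundError derive
# from TransportError and expose a `status_code`; anything that is not a
# transient transport failure (including plain Exception/BaseException and
# 404s) is treated as fatal.
from elasticsearch import exceptions as es_exceptions

def is_safe_to_retry(exc):
    if isinstance(exc, es_exceptions.ConnectionError):
        return True  # network-level failure, e.g. the 503 case above
    if isinstance(exc, es_exceptions.TransportError):
        # conflict, throttling, or server-side 5xx responses
        return exc.status_code in (409, 429, 500, 502, 503, 504)
    return False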
def put_template(self, *args, **kwargs):
    assert 'name' in kwargs and 'body' in kwargs
    # Record the template name/body on the first call and require every
    # subsequent call to pass the same values.
    name = kwargs['name']
    if self.name is None:
        self.name = name
    else:
        assert self.name == name
    body = kwargs['body']
    if self.body is None:
        self.body = body
    else:
        assert self.body == body
    # Consume the next scripted fault, if any, to simulate Elasticsearch
    # failures: an unexpected exception, a connection error, or a 50x
    # transport error. An empty behavior queue means the call succeeds.
    if self.behavior:
        behavior = self.behavior.pop(0)
        if behavior == "exception":
            raise MockException()
        elif behavior == "ce":
            raise es_excs.ConnectionError(None, "fake ce", Exception())
        elif behavior in ("500", "501", "502", "503", "504"):
            raise es_excs.TransportError(
                int(behavior), "fake 50x", Exception()
            )
    return None
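
# Illustrative, standalone sketch (class and values are made up) of the same
# fault-injection pattern: a queue of scripted faults is consumed once per
# call, so a caller that retries on transient errors succeeds once the queue
# drains. Assumes `es_excs` is elasticsearch-py's `elasticsearch.exceptions`
# (pre-8.x constructor signatures).
from elasticsearch import exceptions as es_excs

class FakeTemplateAPI(object):
    def __init__(self, behaviors):
        self.behaviors = list(behaviors)

    def put_template(self, name, body):
        if self.behaviors:
            fault = self.behaviors.pop(0)
            if fault == "ce":
                raise es_excs.ConnectionError(None, "fake ce", Exception())
            raise es_excs.TransportError(int(fault), "fake 50x", Exception())
        return {"acknowledged": True}

fake = FakeTemplateAPI(["ce", "503"])
for attempt in range(3):
    try:
        fake.put_template(name="tmpl", body={"mappings": {}})
        break  # third attempt reaches the success path
    except (es_excs.ConnectionError, es_excs.TransportError):
        continue  # scripted transient failure; retry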
def _build_query(self, request):
    params = [
        'deploy_id', 'deploy_target', 'deploy_target_status',
        'search', 'log_module', 'log_level'
    ]
    page, page_size = get_paginate_params(request)
    # Keep only the filter parameters that were actually supplied.
    raw_dict = {
        param: request.query_params.get(param)
        for param in params if request.query_params.get(param)
    }
    self.sort_by = request.query_params.get('sort', 'log_timestamp')
    self.sort_order = request.query_params.get('sort_order', 'desc')
    timestamp = request.query_params.get(
        'timestamp', datetime.now().isoformat()[:-3] + 'Z')
    direction = request.query_params.get('direction', 'backward')
    query_dict = {}
    try:
        deploy = TarsDeployment.objects.get(pk=raw_dict['deploy_id'])
    except (KeyError, ObjectDoesNotExist):
        raise exceptions.TransportError(400, 'deploy id error')

    # Translate each supplied parameter into an Elasticsearch bool clause.
    must = []
    should = None
    for k, v in raw_dict.iteritems():
        if k == 'search':
            v = v.replace('-', ' ')
            must.append({
                'multi_match': {
                    'fields': ['deploy_target_ip', 'deploy_target_name'],
                    'query': v,
                    "operator": "and"
                }
            })
        elif k == 'deploy_target_status':
            # Comma-separated statuses become "should" clauses (match any).
            should = [{'match': {k: i}} for i in v.split(',')]
        elif k == 'deploy_target':
            must.append({'terms': {k: v.split(',')}})
        elif k == 'log_level':
            must.append({'terms': {k: [l.upper() for l in v.split(',')]}})
        elif k == 'log_module':
            must.append({'match': {'_type': v}})
        else:
            must.append({'match': {k: v}})

    # Careful here, the pattern is (start_time, end_time).
    # Backward paging fetches records older than `timestamp` page by page;
    # forward fetches a fixed-size batch of newer records.
    range_dict = {}
    if direction == 'backward':
        range_dict['lt'] = timestamp
        query_dict['size'] = page_size
        if page > 1:
            query_dict['from_'] = (page - 1) * page_size
    else:
        range_dict['gt'] = timestamp
        query_dict['size'] = 300
        # query_dict['search_type'] = 'scan'
        # query_dict['scroll'] = '30s'
    must.append({'range': {'log_timestamp': range_dict}})

    query_bool_dict = {'must': must}
    if should is not None:
        query_bool_dict['should'] = should
        query_bool_dict['minimum_should_match'] = 1

    query_dict['index'] = deploy.log_index()
    query_dict['sort'] = "log_timestamp:desc"
    query_dict['body'] = {"query": {"bool": query_bool_dict}}
    self.query = query_dict
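
# Illustrative only: the shape of self.query that the method above would build
# for a request such as
#   ?deploy_id=42&deploy_target_status=RUNNING,FAILED&log_level=info&direction=backward
# Index name, timestamp, and page size are made-up values; the dict is intended
# to be passed as keyword arguments to elasticsearch-py's Elasticsearch.search().
example_query = {
    'index': 'tars-log-42',          # deploy.log_index()
    'size': 20,                      # page_size from get_paginate_params()
    'sort': 'log_timestamp:desc',
    'body': {
        'query': {
            'bool': {
                'must': [
                    {'terms': {'log_level': ['INFO']}},
                    {'range': {'log_timestamp': {'lt': '2016-01-01T00:00:00.000Z'}}},
                ],
                'should': [
                    {'match': {'deploy_target_status': 'RUNNING'}},
                    {'match': {'deploy_target_status': 'FAILED'}},
                ],
                'minimum_should_match': 1,
            }
        }
    },
}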